<|file_name|>CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72b.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72b.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__delete.label.xml
Template File: sources-sinks-72b.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: malloc Allocate data using malloc()
* GoodSource: Allocate data using new
* Sinks:
* GoodSink: Deallocate data using free()
* BadSink : Deallocate data using delete
* Flow Variant: 72 Data flow: data passed in a vector from one function to another in different source files
*
* */
<|fim▁hole|>
namespace CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72
{
#ifndef OMITBAD
void badSink(vector<int64_t *> dataVector)
{
/* copy data out of dataVector */
int64_t * data = dataVector[2];
/* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
* require a call to free() to deallocate the memory */
delete data;
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(vector<int64_t *> dataVector)
{
int64_t * data = dataVector[2];
/* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
* require a call to free() to deallocate the memory */
delete data;
}
/* goodB2G uses the BadSource with the GoodSink */
void goodB2GSink(vector<int64_t *> dataVector)
{
int64_t * data = dataVector[2];
/* FIX: Deallocate the memory using free() */
free(data);
}
#endif /* OMITGOOD */
} /* close namespace */<|fim▁end|> | #include "std_testcase.h"
#include <vector>
using namespace std;
|
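The row above exercises CWE-762 (mismatched memory management routines). The invariant the testcase probes is that each allocator must be paired with its matching deallocator; a minimal standalone sketch (hypothetical, not part of the Juliet suite):

```cpp
#include <cstdint>
#include <cstdlib>

int main()
{
    /* Matched pair: memory from malloc() is released with free() */
    int64_t *a = static_cast<int64_t *>(malloc(sizeof(int64_t)));
    free(a);

    /* Matched pair: memory from new is released with delete */
    int64_t *b = new int64_t;
    delete b;

    /* CWE-762 (undefined behavior): malloc() paired with delete.
     * This is exactly what badSink above does:
     *   int64_t *c = static_cast<int64_t *>(malloc(sizeof(int64_t)));
     *   delete c;
     */
    return 0;
}
```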
<|file_name|>input.go<|end_file_name|><|fim▁begin|>package audio
import (
"encoding/binary"
"io"
"sync"
"player/logger"
)
// An Input takes an audio input source and writes it to
// an audio output writer
type Input struct {
// Audio input
input io.Reader
// Audio output
output Writer
// Orchestration channels
stopC chan bool // Stop reading the source
resumeC chan bool // Resume reading the source
endC chan bool // bool sent when finished
// Close orchestration
closeC chan bool
closeWg *sync.WaitGroup
}
// Reads the audio input source and writes it to the
// audio output writer
func (i *Input) play() {
logger.Debug("playing audio input")
defer logger.Debug("stopped audio input")
defer i.closeWg.Done()
defer func(i *Input) {
logger.Debug("audio input complete")
i.endC <- true
}(i)
for {
select {
case <-i.stopC:
select {
case <-i.closeC:
return
case <-i.resumeC:
continue
}
case <-i.closeC:
return
default:<|fim▁hole|> case io.ErrShortBuffer:
continue // Wait for the buffer to fill
case io.EOF, io.ErrUnexpectedEOF:
return // We have completed reading the reader
default:
logger.WithError(err).Error("unexpected audio input read error")
return
}
}
if _, err := i.output.Write(frames); err != nil {
logger.WithError(err).Error("unexpected audio input write error")
return
}
}
}
}
// Starts the audio input play goroutine
func (i *Input) Play() {
defer logger.Debug("play audio input")
i.closeWg.Add(1)
go i.play()
}
// Stop the input - stops reading the input audio source so no more
// data is written to the input writer
func (i *Input) Stop() {
defer logger.Debug("stop audio input")
i.stopC <- true
}
// Resume the input - resumes reading the input audio source to
// the input writer
func (i *Input) Resume() {
logger.Debug("resume audio input")
i.resumeC <- true
}
// Returns the End of the input
func (i *Input) End() <-chan bool {
return (<-chan bool)(i.endC)
}
// Stops playing an input midway through playback
func (i *Input) Close() {
defer logger.Debug("close audio input")
close(i.closeC)
i.closeWg.Wait()
}
// Create a new input
func NewInput(i io.Reader, o Writer) *Input {
return &Input{
// I/O
input: i,
output: o,
// Orchestration Channels
stopC: make(chan bool, 1),
resumeC: make(chan bool, 1),
endC: make(chan bool, 1),
// Close orchestration
closeC: make(chan bool, 1),
closeWg: &sync.WaitGroup{},
}
}<|fim▁end|> | frames := make([]int16, FRAMES_PER_BUFFER)
if err := binary.Read(i.input, binary.LittleEndian, frames); err != nil {
switch err { |
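The stop/resume/close orchestration in `Input.play` above is a small, reusable pattern: the `default` case does the work, a nested `select` parks the goroutine while stopped, and a deferred send on `endC` signals completion. A stripped-down, self-contained sketch of just that pattern (names here are illustrative, not from the package):

```go
package main

import (
	"fmt"
	"time"
)

// worker mirrors the select structure of Input.play: the default case does
// the work, a stop parks the goroutine until resume or close, and endC is
// always signalled on exit via the deferred send.
func worker(stopC, resumeC, closeC, endC chan bool) {
	defer func() { endC <- true }()
	for i := 0; ; i++ {
		select {
		case <-stopC:
			select { // parked until resumed or closed
			case <-closeC:
				return
			case <-resumeC:
			}
		case <-closeC:
			return
		default:
			fmt.Println("tick", i) // stand-in for the binary.Read/Write work
			time.Sleep(10 * time.Millisecond)
		}
	}
}

func main() {
	stopC := make(chan bool, 1)
	resumeC := make(chan bool, 1)
	closeC := make(chan bool) // closed, not sent to, as in Input.Close
	endC := make(chan bool, 1)
	go worker(stopC, resumeC, closeC, endC)
	time.Sleep(25 * time.Millisecond)
	stopC <- true   // pause, like Input.Stop
	resumeC <- true // resume, like Input.Resume
	time.Sleep(25 * time.Millisecond)
	close(closeC) // shut down
	<-endC        // like waiting on Input.End
}
```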
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
# Create your views here.
from .models import Course, Student, StudentCourse
from .serializers import CourseSerializer, StudentSerialiser
from rest_framework import viewsets
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
class StudentViewSet(viewsets.ModelViewSet):
queryset = Student.objects.all()
serializer_class = StudentSerialiser<|fim▁hole|> @list_route(methods=['GET'])
def make(self, request):
username = request.GET.get('username', None)
if username:
Student.objects.get_or_create(nickname=username)
return Response({'success': True})
class CourseViewSet(viewsets.ModelViewSet):
queryset = Course.objects.all()
serializer_class = CourseSerializer
def get_queryset(self):
result = super(CourseViewSet, self).get_queryset()
username = self.request.GET.get('username', None)
active = self.request.GET.get('active', None)
if not username or active != '1':
return result
user = Student.objects.get(nickname=username)
courses_ids = StudentCourse.objects.filter(student=user, active=True).values_list('course_id', flat=True)
return result.filter(id__in=courses_ids)
@detail_route(methods=['GET'])
def start(self, request, pk=None):
username = request.GET.get('username', None)
user = Student.objects.get(nickname=username)
course = Course.objects.get(id=pk)
student_course, created = StudentCourse.objects.get_or_create(student=user, course=course)
StudentCourse.objects.filter(student=user).update(active=False)
student_course.active = True
student_course.save()
return Response({'success': True})<|fim▁end|> | |
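For the viewsets above to be reachable over HTTP, a router still has to map them to URLs. A hypothetical `urls.py` (URL prefixes are assumptions):

```python
# Hypothetical urls.py for the viewsets above; URL prefixes are assumptions.
from rest_framework.routers import DefaultRouter

from .views import CourseViewSet, StudentViewSet

router = DefaultRouter()
router.register(r'students', StudentViewSet)
router.register(r'courses', CourseViewSet)

urlpatterns = router.urls

# The custom routes then resolve as:
#   @list_route   -> GET /students/make/?username=alice
#   @detail_route -> GET /courses/1/start/?username=alice
```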
<|file_name|>u32x4.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![cfg_attr(feature = "cargo-clippy", allow(inline_always))]
use simdty::u32x4;
#[cfg(feature = "simd_opt")]
#[inline(always)]
pub fn rotate_right_const(vec: u32x4, n: u32) -> u32x4 {
match n {
16 => rotate_right_16(vec),
8 => rotate_right_8(vec),
_ => rotate_right_any(vec, n),
}
}
#[cfg(not(feature = "simd_opt"))]
#[inline(always)]
pub fn rotate_right_const(vec: u32x4, n: u32) -> u32x4 {
rotate_right_any(vec, n)
}
#[inline(always)]
fn rotate_right_any(vec: u32x4, n: u32) -> u32x4 {
let r = n as u32;
let l = 32 - r;
(vec >> u32x4::new(r, r, r, r)) ^ (vec << u32x4::new(l, l, l, l))
}
#[cfg(feature = "simd_opt")]
#[inline(always)]
fn rotate_right_16(vec: u32x4) -> u32x4 {
if cfg!(target_feature = "ssse3") {
// pshufb (SSSE3) / vpshufb (AVX2)
transmute_shuffle!(u8x16, simd_shuffle16, vec,
[ 2, 3, 0, 1,
6, 7, 4, 5,
10, 11, 8, 9,
14, 15, 12, 13])
} else if cfg!(any(target_feature = "sse2", target_feature = "neon")) {
// pshuflw+pshufhw (SSE2) / vrev (NEON)
transmute_shuffle!(u16x8, simd_shuffle8, vec,
[1, 0,
3, 2,
5, 4,
7, 6])
} else {
rotate_right_any(vec, 16)
}
}
#[cfg(feature = "simd_opt")]
#[inline(always)]
fn rotate_right_8(vec: u32x4) -> u32x4 {
if cfg!(target_feature = "ssse3") {
// pshufb (SSSE3) / vpshufb (AVX2)
transmute_shuffle!(u8x16, simd_shuffle16, vec,
[ 1, 2, 3, 0,
5, 6, 7, 4,
9, 10, 11, 8,
13, 14, 15, 12])
} else {
rotate_right_any(vec, 8)
}
}<|fim▁end|> | // Copyright 2015 blake2-rfc Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or |
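The shuffle-based fast paths above work because rotating a 32-bit lane right by 16 bits is a halfword swap and rotating by 8 is a one-byte rotation, which is exactly what `pshufb`/`vrev` can express. A scalar, non-SIMD sanity check of that equivalence (plain Rust, independent of this crate):

```rust
// Scalar check, plain Rust, independent of this crate: rotating a 32-bit
// value right by 16 swaps its halfwords, and by 8 rotates its bytes by one,
// which is why byte/halfword shuffles can implement these two cases.
fn main() {
    let x: u32 = 0x1122_3344;
    assert_eq!(x.rotate_right(16), 0x3344_1122); // halfwords swapped
    assert_eq!(x.rotate_right(8), 0x4411_2233); // bytes rotated by one
    // The generic form used by rotate_right_any:
    let n: u32 = 8;
    assert_eq!((x >> n) ^ (x << (32 - n)), x.rotate_right(n));
    println!("ok");
}
```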
<|file_name|>VenueHash.ts<|end_file_name|><|fim▁begin|>import {LocationHash} from "./LocationHash";
/**
* Created by hypfer on 08.06.17.<|fim▁hole|> return "VenueHash";
}
Title: string;
Address: string;
Foursquare_id: string;
constructor(id: string, ownerID: number, DbId: string, Source: number, Public: Boolean,
latitude: number, longitude: number,
title: string, address: string, foursquare_id: string) {
super(id, ownerID, DbId, Source, Public, latitude, longitude);
this.Title = title;
this.Address = address;
this.Foursquare_id = foursquare_id;
}
}<|fim▁end|> | */
export class VenueHash extends LocationHash {
protected getHashType(): string { |
<|file_name|>imports.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use Indent;
use utils;
use syntax::codemap::{self, BytePos, Span};
use codemap::SpanUtils;
use lists::{write_list, itemize_list, ListItem, ListFormatting, SeparatorTactic, definitive_tactic};
use types::rewrite_path;
use rewrite::{Rewrite, RewriteContext};
use visitor::FmtVisitor;
use std::cmp::{self, Ordering};
use syntax::{ast, ptr};
fn path_of(a: &ast::ViewPath_) -> &ast::Path {
match a {
&ast::ViewPath_::ViewPathSimple(_, ref p) => p,
&ast::ViewPath_::ViewPathGlob(ref p) => p,
&ast::ViewPath_::ViewPathList(ref p, _) => p,
}
}
fn compare_path_segments(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
a.identifier.name.as_str().cmp(&b.identifier.name.as_str())
}
fn compare_paths(a: &ast::Path, b: &ast::Path) -> Ordering {
for segment in a.segments.iter().zip(b.segments.iter()) {
let ord = compare_path_segments(segment.0, segment.1);
if ord != Ordering::Equal {
return ord;
}
}
a.segments.len().cmp(&b.segments.len())
}
fn compare_path_list_items(a: &ast::PathListItem, b: &ast::PathListItem) -> Ordering {
let name_ordering = match a.node.name() {
Some(a_name) => {
match b.node.name() {
Some(b_name) => a_name.name.as_str().cmp(&b_name.name.as_str()),
None => Ordering::Greater,
}
}
None => {
match b.node.name() {
Some(_) => Ordering::Less,
None => Ordering::Equal,
}
}
};
if name_ordering == Ordering::Equal {
match a.node.rename() {
Some(a_rename) => {
match b.node.rename() {
Some(b_rename) => a_rename.name.as_str().cmp(&b_rename.name.as_str()),
None => Ordering::Greater,
}
}
None => {
match b.node.rename() {
Some(_) => Ordering::Less,
None => Ordering::Equal,
}
}
}
} else {
name_ordering
}
}
fn compare_path_list_item_lists(a_items: &Vec<ast::PathListItem>,
b_items: &Vec<ast::PathListItem>)
-> Ordering {
let mut a = a_items.clone();
let mut b = b_items.clone();
a.sort_by(|a, b| compare_path_list_items(a, b));
b.sort_by(|a, b| compare_path_list_items(a, b));
for comparison_pair in a.iter().zip(b.iter()) {
let ord = compare_path_list_items(comparison_pair.0, comparison_pair.1);
if ord != Ordering::Equal {
return ord;
}
}
a.len().cmp(&b.len())
}
fn compare_view_path_types(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {
use syntax::ast::ViewPath_::*;
match (a, b) {
(&ViewPathSimple(..), &ViewPathSimple(..)) => Ordering::Equal,
(&ViewPathSimple(..), _) => Ordering::Less,
(&ViewPathGlob(_), &ViewPathSimple(..)) => Ordering::Greater,
(&ViewPathGlob(_), &ViewPathGlob(_)) => Ordering::Equal,
(&ViewPathGlob(_), &ViewPathList(..)) => Ordering::Less,
(&ViewPathList(_, ref a_items), &ViewPathList(_, ref b_items)) => {
compare_path_list_item_lists(a_items, b_items)
}
(&ViewPathList(..), _) => Ordering::Greater,
}
}
fn compare_view_paths(a: &ast::ViewPath_, b: &ast::ViewPath_) -> Ordering {
match compare_paths(path_of(a), path_of(b)) {
Ordering::Equal => compare_view_path_types(a, b),
cmp => cmp,
}
}
fn compare_use_items(a: &ast::Item, b: &ast::Item) -> Option<Ordering> {
match (&a.node, &b.node) {
(&ast::ItemKind::Use(ref a_vp), &ast::ItemKind::Use(ref b_vp)) => {
Some(compare_view_paths(&a_vp.node, &b_vp.node))
}
_ => None,
}
}
// TODO (some day) remove unused imports, expand globs, compress many single
// imports into a list import.
impl Rewrite for ast::ViewPath {
// Returns an empty string when the ViewPath is empty (like foo::bar::{})
fn rewrite(&self, context: &RewriteContext, width: usize, offset: Indent) -> Option<String> {
match self.node {
ast::ViewPath_::ViewPathList(_, ref path_list) if path_list.is_empty() => {
Some(String::new())
}
ast::ViewPath_::ViewPathList(ref path, ref path_list) => {
rewrite_use_list(width, offset, path, path_list, self.span, context)
}
ast::ViewPath_::ViewPathGlob(_) => {
// FIXME convert to list?
None
}
ast::ViewPath_::ViewPathSimple(ident, ref path) => {
let ident_str = ident.to_string();
// 4 = " as ".len()
let budget = try_opt!(width.checked_sub(ident_str.len() + 4));
let path_str = try_opt!(rewrite_path(context, false, None, path, budget, offset));
Some(if path.segments.last().unwrap().identifier == ident {
path_str
} else {
format!("{} as {}", path_str, ident_str)
})
}
}
}
}
impl<'a> FmtVisitor<'a> {
pub fn format_imports(&mut self, use_items: &[ptr::P<ast::Item>]) {
// Find the location immediately before the first use item in the run. This must not lie
// before the current `self.last_pos`
let pos_before_first_use_item = use_items.first()
.map(|p_i| cmp::max(self.last_pos, p_i.span.lo))
.unwrap_or(self.last_pos);
// Construct a list of pairs, each containing a `use` item and the start of span before
// that `use` item.
let mut last_pos_of_prev_use_item = pos_before_first_use_item;
let mut ordered_use_items = use_items.iter()
.map(|p_i| {
let new_item = (&*p_i, last_pos_of_prev_use_item);
last_pos_of_prev_use_item = p_i.span.hi;
new_item
})
.collect::<Vec<_>>();
let pos_after_last_use_item = last_pos_of_prev_use_item;
// Order the imports by view-path & other import path properties
ordered_use_items.sort_by(|a, b| compare_use_items(a.0, b.0).unwrap());
// First, output the span before the first import
let prev_span_str = self.snippet(codemap::mk_sp(self.last_pos, pos_before_first_use_item));
// Look for purely trailing space at the start of the prefix snippet before a linefeed, or
// a prefix that's entirely horizontal whitespace.
let prefix_span_start = match prev_span_str.find('\n') {
Some(offset) if prev_span_str[..offset].trim().is_empty() => {
self.last_pos + BytePos(offset as u32)
}
None if prev_span_str.trim().is_empty() => pos_before_first_use_item,
_ => self.last_pos,
};
// Look for indent (the line part preceding the use is all whitespace) and excise that
// from the prefix
let span_end = match prev_span_str.rfind('\n') {
Some(offset) if prev_span_str[offset..].trim().is_empty() => {
self.last_pos + BytePos(offset as u32)
}
_ => pos_before_first_use_item,
};
self.last_pos = prefix_span_start;
self.format_missing(span_end);
for ordered in ordered_use_items {
// Fake out the formatter by setting `self.last_pos` to the appropriate location before
// each item before visiting it.
self.last_pos = ordered.1;
self.visit_item(&ordered.0);
}
self.last_pos = pos_after_last_use_item;
}
pub fn format_import(&mut self, vis: &ast::Visibility, vp: &ast::ViewPath, span: Span) {
let vis = utils::format_visibility(vis);
let mut offset = self.block_indent;
offset.alignment += vis.len() + "use ".len();
// 1 = ";"
match vp.rewrite(&self.get_context(),
self.config.max_width - offset.width() - 1,
offset) {
Some(ref s) if s.is_empty() => {
// Format up to last newline
let prev_span = codemap::mk_sp(self.last_pos, source!(self, span).lo);
let span_end = match self.snippet(prev_span).rfind('\n') {
Some(offset) => self.last_pos + BytePos(offset as u32),
None => source!(self, span).lo,
};
self.format_missing(span_end);
self.last_pos = source!(self, span).hi;
}
Some(ref s) => {
let s = format!("{}use {};", vis, s);
self.format_missing_with_indent(source!(self, span).lo);
self.buffer.push_str(&s);
self.last_pos = source!(self, span).hi;
}
None => {
self.format_missing_with_indent(source!(self, span).lo);
self.format_missing(source!(self, span).hi);
}
}
}
}
fn rewrite_single_use_list(path_str: Option<String>, vpi: &ast::PathListItem) -> String {
let path_item_str = if let ast::PathListItemKind::Ident { name, .. } = vpi.node {
// A name.
match path_str {
Some(path_str) => format!("{}::{}", path_str, name),
None => name.to_string(),
}
} else {
// `self`.
match path_str {
Some(path_str) => path_str,
// This catches the import: use {self}, which is a compiler error, so we just
// leave it alone.
None => "{self}".to_owned(),
}
};
append_alias(path_item_str, vpi)
}
fn rewrite_path_item(vpi: &&ast::PathListItem) -> Option<String> {
let path_item_str = match vpi.node {
ast::PathListItemKind::Ident { name, .. } => name.to_string(),
ast::PathListItemKind::Mod { .. } => "self".to_owned(),
};
Some(append_alias(path_item_str, vpi))
}
fn append_alias(path_item_str: String, vpi: &ast::PathListItem) -> String {
match vpi.node {
ast::PathListItemKind::Ident { rename: Some(rename), .. } |
ast::PathListItemKind::Mod { rename: Some(rename), .. } => {
format!("{} as {}", path_item_str, rename)
}
_ => path_item_str,
}
}
// Pretty prints a multi-item import.
// Assumes that path_list.len() > 0.
pub fn rewrite_use_list(width: usize,
offset: Indent,
path: &ast::Path,
path_list: &[ast::PathListItem],
span: Span,
context: &RewriteContext)
-> Option<String> {
// Returns a different option to distinguish `::foo` and `foo`
let opt_path_str = if !path.to_string().is_empty() {
Some(path.to_string())
} else if path.global {
// path is absolute, we return an empty String to avoid a double `::`
Some(String::new())
} else {
None
};
match path_list.len() {
0 => unreachable!(),
1 => return Some(rewrite_single_use_list(opt_path_str, &path_list[0])),
_ => (),
}
// 2 = ::
let path_separation_w = if opt_path_str.is_some() { 2 } else { 0 };
// 1 = {
let supp_indent = path.to_string().len() + path_separation_w + 1;
// 1 = }
let remaining_width = width.checked_sub(supp_indent + 1).unwrap_or(0);
let mut items = {
// Dummy value, see explanation below.
let mut items = vec![ListItem::from_str("")];
let iter = itemize_list(context.codemap,
path_list.iter(),
"}",
|vpi| vpi.span.lo,
|vpi| vpi.span.hi,
rewrite_path_item,
context.codemap.span_after(span, "{"),
span.hi);
items.extend(iter);
items
};
// We prefixed the item list with a dummy value so that we can
// potentially move "self" to the front of the vector without touching
// the rest of the items.
let has_self = move_self_to_front(&mut items);
let first_index = if has_self { 0 } else { 1 };
if context.config.reorder_imported_names {
items[1..].sort_by(|a, b| a.item.cmp(&b.item));
}
let tactic = definitive_tactic(&items[first_index..],
::lists::ListTactic::Mixed,
remaining_width);
let fmt = ListFormatting {
tactic: tactic,
separator: ",",
trailing_separator: SeparatorTactic::Never,
indent: offset + supp_indent,
// FIXME This is too conservative, and will not use all width
// available
// (lose 1 column (";"))
width: remaining_width,
ends_with_newline: false,
config: context.config,
};
let list_str = try_opt!(write_list(&items[first_index..], &fmt));
Some(match opt_path_str {
Some(opt_path_str) => format!("{}::{{{}}}", opt_path_str, list_str),
None => format!("{{{}}}", list_str),
})
}
// Returns true when self item was found.
fn move_self_to_front(items: &mut Vec<ListItem>) -> bool {
match items.iter().position(|item| item.item.as_ref().map(|x| &x[..]) == Some("self")) {<|fim▁hole|> Some(pos) => {
items[0] = items.remove(pos);
true
}
None => false,
}
}<|fim▁end|> | |
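The comparators above give rustfmt a total order over `use` items: paths compare segment by segment, segment count breaks ties, and the view-path kind (Simple before Glob before List) decides the rest. A toy string-based sketch of the path comparison alone (illustration only, independent of the `syntax` crate):

```rust
// Toy, string-based version of the segment-by-segment ordering that
// compare_paths implements above, with ties broken by segment count.
use std::cmp::Ordering;

fn compare_paths(a: &str, b: &str) -> Ordering {
    let sa: Vec<&str> = a.split("::").collect();
    let sb: Vec<&str> = b.split("::").collect();
    for (x, y) in sa.iter().zip(sb.iter()) {
        match x.cmp(y) {
            Ordering::Equal => (),
            ord => return ord,
        }
    }
    sa.len().cmp(&sb.len())
}

fn main() {
    let mut uses = vec!["std::fmt::Debug", "std::cmp", "serde::Serialize"];
    uses.sort_by(|a, b| compare_paths(a, b));
    assert_eq!(uses, ["serde::Serialize", "std::cmp", "std::fmt::Debug"]);
    // A prefix sorts before a longer path with the same leading segments:
    assert_eq!(compare_paths("std::cmp", "std::cmp::Ordering"), Ordering::Less);
}
```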
<|file_name|>create-internal-ssl-certs.py<|end_file_name|><|fim▁begin|># This script has to run using the Python executable found in:
# /opt/mgmtworker/env/bin/python in order to properly load the manager
# blueprints utils.py module.
import argparse
import logging
import utils
class CtxWithLogger(object):
logger = logging.getLogger('internal-ssl-certs-logger')
<|fim▁hole|>
parser = argparse.ArgumentParser()
parser.add_argument('--metadata', default=utils.CERT_METADATA_FILE_PATH,
help='File containing the cert metadata. It should be a '
'JSON file containing an object with the '
'"internal_rest_host" and "networks" fields.')
parser.add_argument('manager_ip', default=None, nargs='?',
help='The IP of this machine on the default network')
if __name__ == '__main__':
args = parser.parse_args()
cert_metadata = utils.load_cert_metadata(filename=args.metadata)
internal_rest_host = args.manager_ip or cert_metadata['internal_rest_host']
networks = cert_metadata.get('networks', {})
networks['default'] = internal_rest_host
cert_ips = [internal_rest_host] + list(networks.values())
utils.generate_internal_ssl_cert(ips=cert_ips, name=internal_rest_host)
utils.store_cert_metadata(internal_rest_host, networks,
filename=args.metadata)<|fim▁end|> |
utils.ctx = CtxWithLogger() |
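A hypothetical invocation of the script above, with the shape of metadata file it expects (all paths, IPs and values are made up):

```python
# Hypothetical invocation; all paths, IPs and file contents below are made up.
#
#   /opt/mgmtworker/env/bin/python create-internal-ssl-certs.py \
#       --metadata /etc/cloudify/ssl/certificate_metadata 172.20.0.3
#
# with a metadata JSON along the lines of:
#
#   {"internal_rest_host": "172.20.0.3",
#    "networks": {"management": "10.0.1.5"}}
#
# The generated cert then covers 172.20.0.3 (stored as the "default"
# network) plus every address listed under "networks".
```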
<|file_name|>3_biathlontoken.js<|end_file_name|><|fim▁begin|>const Nodelist = artifacts.require("./Nodelist.sol");
const BiathlonNode = artifacts.require("./BiathlonNode.sol");
const SecondNode = artifacts.require("./SecondNode.sol");
const BiathlonToken = artifacts.require("./BiathlonToken.sol");
const Ownable = artifacts.require('../contracts/ownership/Ownable.sol');
// const MintableToken = artifacts.require('MintableToken.sol');
const SecondBiathlonToken = artifacts.require("./SecondBiathlonToken.sol");
let nl;
let bn;
let bt;
let sn;
let st;
contract('BiathlonToken', function(accounts) {
beforeEach(async function() {
bn = await BiathlonNode.deployed();
nl = await Nodelist.deployed();
bt = await BiathlonToken.deployed();
st = await SecondBiathlonToken.deployed();
sn = await SecondNode.deployed();
});
it('should have an owner', async function() {
let owner = await bt.owner();
assert.isTrue(owner !== 0);
});
it('should belong to the correct node', async function() {
let node = await bt.node_address();
let bna = await bn.address;
assert.equal(node, bna, "Token was not initialised to correct node");
});
it('should have a storage contract that is separate', async function() {
let storage_address = await bt.storage_address();
assert.notEqual(storage_address, bt.address);
});
it("should be able to register itself with the Node list of tokens", async function() {
let registration = await bt.register_with_node();
let node_token_count = await bn.count_tokens();
assert.equal(node_token_count, 1, "Node array of tokens doesn't have deployed BiathlonToken");
});
it('should mint a given amount of tokens to a given address', async function() {
const result = await bt.mint(accounts[0], 100, { from: accounts[0] });
assert.equal(result.logs[0].event, 'Mint');
assert.equal(result.logs[0].args.to.valueOf(), accounts[0]);
assert.equal(result.logs[0].args.amount.valueOf(), 100);
assert.equal(result.logs[1].event, 'Transfer');
assert.equal(result.logs[1].args.from.valueOf(), 0x0);
let balance0 = await bt.balanceOf(accounts[0]);
assert(balance0 == 100);
let totalSupply = await bt.totalSupply();
assert.equal(totalSupply, 100);
})
it('should allow owner to mint 50 to account #2', async function() {
let result = await bt.mint(accounts[2], 50);
assert.equal(result.logs[0].event, 'Mint');
assert.equal(result.logs[0].args.to.valueOf(), accounts[2]);
assert.equal(result.logs[0].args.amount.valueOf(), 50);
assert.equal(result.logs[1].event, 'Transfer');
assert.equal(result.logs[1].args.from.valueOf(), 0x0);
let new_balance = await bt.balanceOf(accounts[2]);
assert.equal(new_balance, 50, 'Owner could not mint 50 to account #2');
});
it('should have account #2 on registry after first token minting', async function() {
let check_user = await nl.users(accounts[2]);
assert.equal(check_user, bn.address);
});
it('should spend 25 of the tokens minted to account #2', async function() {
let result = await bt.spend(accounts[2], 25);
assert.equal(result.logs[0].event, 'Burn');
let new_balance = await bt.balanceOf(accounts[2]);
assert.equal(new_balance, 25);
});
it('should have total supply changed by these minting and spending operations', async function() {
let result = await bt.totalSupply();
assert.equal(result, 125);
});
it('should not allow non-owners to spend', async function() {
try {
let spendtask = await bt.spend(accounts[0], 1, {from: accounts[2]})
} catch (error) {
const invalidJump = error.message.search('invalid opcode') >= 0;
assert(invalidJump, "Expected throw, got '" + error + "' instead");
return;
}
assert.fail("Expected to reject spending from non-owner");
});
it('should not allow non-owners to mint', async function() {
try {
let minttask = await bt.mint(accounts[2], 50, {from: accounts[1]});
} catch (error) {
const invalidJump = error.message.search('invalid opcode') >= 0;
assert(invalidJump, "Expected throw, got '" + error + "' instead");
return;
}
assert.fail("Expected to reject minting from non-owner");
});
it('should not be able to spend more than it has', async function() {
try {
let spendtask = await bt.spend(accounts[2], 66)
} catch (error) {
const invalidJump = error.message.search('invalid opcode') >= 0;
assert(invalidJump, "Expected throw, got '" + error + "' instead");
return;
}
assert.fail("Expected to reject spending more than limit");
});
it('second deployed token should belong to the correct node', async function() {
let node = await st.node_address();
let bna = await bn.address;
assert.equal(node, bna, "Token was not initialised to correct node");
});
it('second token should be able to upgrade the token with the node', async function() {
let name = await st.name();
const upgraded = await bn.upgrade_token(bt.address, st.address, name);
assert.equal(upgraded.logs[0].event, 'UpgradeToken');
let count_of_tokens = await bn.count_tokens();
assert.equal(count_of_tokens, 1, 'Should only be one token in tokenlist still');
});
it('should deactivate original token after upgrade', async function () {
let tia = await bn.tokens.call(bt.address);
assert.isNotTrue(tia[1]);
let newtoken = await bn.tokens.call(st.address);
assert.isTrue(newtoken[1]);
});
it('should carry over the previous balances since storage contract is fixed', async function() {
let get_balance = await st.balanceOf(accounts[2]);
assert.equal(get_balance, 25);
});
it('should not allow the deactivated contract to mint', async function() {
try {
let newmint = await bt.mint(accounts[2], 10);
} catch(error) {
const invalidJump = error.message.search('invalid opcode') >= 0;
assert(invalidJump, "Expected throw, got '" + error + "' instead");
return;
}
assert.fail("Expected to reject spending more than limit");
});
it('should allow minting more tokens to accounts', async function() {
let newmint = await st.mint(accounts[2], 3);
let getbalance = await st.balanceOf(accounts[2]);
let totalsupply = await st.totalSupply();
assert.equal(totalsupply, 128);
assert.equal(getbalance, 28);
});
it('should be able to transfer as contract owner from one account to another', async function() {
let thetransfer = await st.biathlon_transfer(accounts[2], accounts[3], 2);
let getbalance2 = await st.balanceOf(accounts[2]);
let getbalance3 = await st.balanceOf(accounts[3]);
assert.equal(getbalance2, 26);
assert.equal(getbalance3, 2);
});
it('should not be able to transfer as non-owner from one account to another', async function() {
try {
let thetransfer = await st.biathlon_transfer(accounts[3], accounts[4], 1, {from: accounts[1]});
} catch(error) {
const invalidJump = error.message.search('invalid opcode') >= 0;
assert(invalidJump, "Expected throw, got '" + error + "' instead");
return;
}
assert.fail("Expected to reject transferring from non-owner");
})<|fim▁hole|>
});<|fim▁end|> | |
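The try/catch/`assert.fail` pattern for expected reverts repeats in almost every negative test above; it could be factored into a helper along these lines (hypothetical, same truffle/web3 test environment assumed):

```javascript
// Hypothetical helper, not part of the suite above: awaits a promise that is
// expected to throw with 'invalid opcode', failing the test otherwise.
async function expectInvalidOpcode(promise, failMessage) {
  try {
    await promise;
  } catch (error) {
    const invalidJump = error.message.search('invalid opcode') >= 0;
    assert(invalidJump, "Expected throw, got '" + error + "' instead");
    return;
  }
  assert.fail(failMessage);
}

// Usage inside a test:
// await expectInvalidOpcode(bt.mint(accounts[2], 50, { from: accounts[1] }),
//                           'Expected to reject minting from non-owner');
```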
<|file_name|>AlwaysLowestAdaptationLogic.cpp<|end_file_name|><|fim▁begin|>/*
* AlwaysLowestAdaptationLogic.cpp
*****************************************************************************
* Copyright (C) 2014 - VideoLAN authors
*
* This program is free software; you can redistribute it and/or modify it<|fim▁hole|> *
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
*****************************************************************************/
#include "AlwaysLowestAdaptationLogic.hpp"
#include "Representationselectors.hpp"
using namespace adaptative::logic;
using namespace adaptative::playlist;
AlwaysLowestAdaptationLogic::AlwaysLowestAdaptationLogic():
AbstractAdaptationLogic()
{
}
BaseRepresentation *AlwaysLowestAdaptationLogic::getCurrentRepresentation(BaseAdaptationSet *adaptSet) const
{
RepresentationSelector selector;
return selector.select(adaptSet, 0);
}<|fim▁end|> | * under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version. |
<|file_name|>GuiInventoryTFC.java<|end_file_name|><|fim▁begin|>package com.bioxx.tfc2.gui;
import java.awt.Rectangle;
import java.util.Collection;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainerCreative;
import net.minecraft.client.gui.inventory.GuiInventory;
import net.minecraft.client.renderer.InventoryEffectRenderer;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.Slot;
import net.minecraft.stats.AchievementList;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.opengl.GL11;
import com.bioxx.tfc2.Core;
import com.bioxx.tfc2.Reference;
import com.bioxx.tfc2.core.PlayerInventory;
public class GuiInventoryTFC extends InventoryEffectRenderer
{
private float xSizeLow;
private float ySizeLow;
private boolean hasEffect;
protected static final ResourceLocation UPPER_TEXTURE = new ResourceLocation(Reference.ModID+":textures/gui/inventory.png");
protected static final ResourceLocation UPPER_TEXTURE_2X2 = new ResourceLocation(Reference.ModID+":textures/gui/gui_inventory2x2.png");
protected static final ResourceLocation EFFECTS_TEXTURE = new ResourceLocation(Reference.ModID+":textures/gui/inv_effects.png");
protected EntityPlayer player;
protected Slot activeSlot;
public GuiInventoryTFC(EntityPlayer player)
{
super(player.inventoryContainer);
this.allowUserInput = true;
player.addStat(AchievementList.OPEN_INVENTORY, 1);
xSize = 176;
ySize = 102 + PlayerInventory.invYSize;
this.player = player;
}
@Override
protected void drawGuiContainerBackgroundLayer(float par1, int par2, int par3)
{
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
if(player.getEntityData().hasKey("craftingTable"))
Core.bindTexture(UPPER_TEXTURE);
else
Core.bindTexture(UPPER_TEXTURE_2X2);
int k = this.guiLeft;
int l = this.guiTop;
this.drawTexturedModalRect(k, l, 0, 0, this.xSize, 102);
//Draw the player avatar
GuiInventory.drawEntityOnScreen(k + 51, l + 75, 30, k + 51 - this.xSizeLow, l + 75 - 50 - this.ySizeLow, this.mc.player);
PlayerInventory.drawInventory(this, width, height, ySize - PlayerInventory.invYSize);
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
}
@Override
protected void drawGuiContainerForegroundLayer(int par1, int par2)
{
//this.fontRenderer.drawString(I18n.format("container.crafting", new Object[0]), 86, 7, 4210752);
}
@Override
/**
* Called from the main game loop to update the screen.<|fim▁hole|> this.mc.displayGuiScreen(new GuiContainerCreative(player));
}
@Override
public void initGui()
{
super.buttonList.clear();
if (this.mc.playerController.isInCreativeMode())
{
this.mc.displayGuiScreen(new GuiContainerCreative(this.mc.player));
}
else
super.initGui();
if (!this.mc.player.getActivePotionEffects().isEmpty())
{
//this.guiLeft = 160 + (this.width - this.xSize - 200) / 2;
this.guiLeft = (this.width - this.xSize) / 2;
this.hasEffect = true;
}
buttonList.clear();
buttonList.add(new GuiInventoryButton(0, new Rectangle(guiLeft+176, guiTop + 3, 25, 20),
new Rectangle(0, 103, 25, 20), Core.translate("gui.Inventory.Inventory"), new Rectangle(1,223,32,32)));
buttonList.add(new GuiInventoryButton(1, new Rectangle(guiLeft+176, guiTop + 22, 25, 20),
new Rectangle(0, 103, 25, 20), Core.translate("gui.Inventory.Skills"), new Rectangle(100,223,32,32)));
buttonList.add(new GuiInventoryButton(2, new Rectangle(guiLeft+176, guiTop + 41, 25, 20),
new Rectangle(0, 103, 25, 20), Core.translate("gui.Calendar.Calendar"), new Rectangle(34,223,32,32)));
buttonList.add(new GuiInventoryButton(3, new Rectangle(guiLeft+176, guiTop + 60, 25, 20),
new Rectangle(0, 103, 25, 20), Core.translate("gui.Inventory.Health"), new Rectangle(67,223,32,32)));
}
@Override
protected void actionPerformed(GuiButton guibutton)
{
//Removed during port
if (guibutton.id == 1)
Minecraft.getMinecraft().displayGuiScreen(new GuiSkills(player));
/*else if (guibutton.id == 2)
Minecraft.getMinecraft().displayGuiScreen(new GuiCalendar(player));*/
else if (guibutton.id == 3)
Minecraft.getMinecraft().displayGuiScreen(new GuiHealth(player));
}
@Override
public void drawScreen(int par1, int par2, float par3)
{
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
super.drawScreen(par1, par2, par3);
this.xSizeLow = par1;
this.ySizeLow = par2;
if(hasEffect)
displayDebuffEffects();
//removed during port
/*for (int j1 = 0; j1 < this.inventorySlots.inventorySlots.size(); ++j1)
{
Slot slot = (Slot)this.inventorySlots.inventorySlots.get(j1);
if (this.isMouseOverSlot(slot, par1, par2) && slot.func_111238_b())
this.activeSlot = slot;
}*/
}
protected boolean isMouseOverSlot(Slot par1Slot, int par2, int par3)
{
return this.isPointInRegion(par1Slot.xPos, par1Slot.yPos, 16, 16, par2, par3);
}
/**
* Displays debuff/potion effects that are currently being applied to the player
*/
private void displayDebuffEffects()
{
int var1 = this.guiLeft - 124;
int var2 = this.guiTop;
Collection var4 = this.mc.player.getActivePotionEffects();
//Removed during port
/*if (!var4.isEmpty())
{
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
GL11.glDisable(GL11.GL_LIGHTING);
int var6 = 33;
if (var4.size() > 5)
var6 = 132 / (var4.size() - 1);
for (Iterator var7 = this.mc.player.getActivePotionEffects().iterator(); var7.hasNext(); var2 += var6)
{
PotionEffect var8 = (PotionEffect)var7.next();
Potion var9 = Potion.potionTypes[var8.getPotionID()] instanceof TFCPotion ?
((TFCPotion) Potion.potionTypes[var8.getPotionID()]) :
Potion.potionTypes[var8.getPotionID()];
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
TFC_Core.bindTexture(EFFECTS_TEXTURE);
this.drawTexturedModalRect(var1, var2, 0, 166, 140, 32);
if (var9.hasStatusIcon())
{
int var10 = var9.getStatusIconIndex();
this.drawTexturedModalRect(var1 + 6, var2 + 7, 0 + var10 % 8 * 18, 198 + var10 / 8 * 18, 18, 18);
}
String var12 = Core.translate(var9.getName());
if (var8.getAmplifier() == 1)
var12 = var12 + " II";
else if (var8.getAmplifier() == 2)
var12 = var12 + " III";
else if (var8.getAmplifier() == 3)
var12 = var12 + " IV";
this.fontRenderer.drawStringWithShadow(var12, var1 + 10 + 18, var2 + 6, 16777215);
String var11 = Potion.getDurationString(var8);
this.fontRenderer.drawStringWithShadow(var11, var1 + 10 + 18, var2 + 6 + 10, 8355711);
}
}*/
}
private long spamTimer;
@Override
protected boolean checkHotbarKeys(int keycode)
{
/*if(this.activeSlot != null && this.activeSlot.slotNumber == 0 && this.activeSlot.getHasStack() &&
this.activeSlot.getStack().getItem() instanceof IFood)
return false;*/
return super.checkHotbarKeys(keycode);
}
private int getEmptyCraftSlot()
{
if(this.inventorySlots.getSlot(4).getStack() == null)
return 4;
if(this.inventorySlots.getSlot(1).getStack() == null)
return 1;
if(this.inventorySlots.getSlot(2).getStack() == null)
return 2;
if(this.inventorySlots.getSlot(3).getStack() == null)
return 3;
if(player.getEntityData().hasKey("craftingTable"))
{
if(this.inventorySlots.getSlot(45).getStack() == null)
return 45;
if(this.inventorySlots.getSlot(46).getStack() == null)
return 46;
if(this.inventorySlots.getSlot(47).getStack() == null)
return 47;
if(this.inventorySlots.getSlot(48).getStack() == null)
return 48;
if(this.inventorySlots.getSlot(49).getStack() == null)
return 49;
}
return -1;
}
}<|fim▁end|> | */
public void updateScreen()
{
if (this.mc.playerController.isInCreativeMode()) |
<|file_name|>complementGraphTest.py<|end_file_name|><|fim▁begin|>__author__ = 'bruno'
import unittest
import algorithms.graphs.complementGraph as ComplementGraph
class TestComplementGraph(unittest.TestCase):
def setUp(self):
pass
<|fim▁hole|> 'b': {'a': 1, 'c': 1, 'd': 1},
'c': {'a': 1, 'b': 1, 'd': 1},
'd': {'b': 1, 'c': 1}}
self.assertEqual({'a': {'d': 1}, 'd': {'a': 1}},
ComplementGraph.make_complement_graph(graph))
def test_complement_graph_2(self):
graph = {'a': {'b': 1, 'd': 1},
'b': {'a': 1, 'c': 1},
'c': {'b': 1, 'd': 1},
'd': {'a': 1, 'c': 1}}
complement = {'a': {'c': 1},
'b': {'d': 1},
'c': {'a': 1},
'd': {'b': 1}}
self.assertEqual(complement, ComplementGraph.make_complement_graph(graph))
def test_complement_graph_3(self):
graph = {'a': {'c': 1, 'd': 1},
'b': {'c': 1, 'd': 1},
'c': {'a': 1, 'b': 1},
'd': {'a': 1, 'b': 1, 'e': 1, 'f': 1},
'e': {'d': 1, 'f': 1},
'f': {'d': 1, 'e': 1}}
complement = {'a': {'b': 1, 'e': 1, 'f': 1},
'b': {'a': 1, 'e': 1, 'f': 1},
'c': {'e': 1, 'd': 1, 'f': 1},
'd': {'c': 1},
'e': {'a': 1, 'c': 1, 'b': 1},
'f': {'a': 1, 'c': 1, 'b': 1}}
self.assertEqual(complement, ComplementGraph.make_complement_graph(graph))
def test_complement_graph_4(self):
graph = {'a': {'b': 1, 'f': 1},
'b': {'a': 1, 'c': 1},
'c': {'b': 1, 'd': 1},
'd': {'c': 1, 'e': 1},
'e': {'d': 1, 'f': 1},
'f': {'a': 1, 'e': 1}}
complement = {'a': {'c': 1, 'e': 1, 'd': 1},
'b': {'e': 1, 'd': 1, 'f': 1},
'c': {'a': 1, 'e': 1, 'f': 1},
'd': {'a': 1, 'b': 1, 'f': 1},
'e': {'a': 1, 'c': 1, 'b': 1},
'f': {'c': 1, 'b': 1, 'd': 1}}
self.assertEqual(complement, ComplementGraph.make_complement_graph(graph))
def test_complement_graph_5(self):
graph = {'a': {'b': 1, 'c': 1, 'd': 1, 'e': 1},
'b': {'a': 1, 'c': 1, 'd': 1, 'e': 1},
'c': {'a': 1, 'b': 1, 'd': 1, 'e': 1},
'd': {'a': 1, 'b': 1, 'c': 1, 'e': 1},
'e': {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'f': 1},
'f': {'e': 1}}
complement = {'a': {'f': 1},
'b': {'f': 1},
'c': {'f': 1},
'd': {'f': 1},
'f': {'a': 1, 'c': 1, 'b': 1, 'd': 1}}
self.assertEqual(complement, ComplementGraph.make_complement_graph(graph))<|fim▁end|> | def test_complement_graph_1(self):
graph = {'a': {'b': 1, 'c': 1}, |
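The module under test, `algorithms.graphs.complementGraph`, is not included in the row. A minimal implementation consistent with all five cases (a sketch, assuming the adjacency-dict format used above: weight-1 edges, no self-loops, and vertices with no complement edges omitted from the result):

```python
# Hypothetical sketch of make_complement_graph, reconstructed from the tests:
# an edge u-v is in the complement exactly when u != v and v is not adjacent
# to u in the input graph.
def make_complement_graph(graph):
    complement = {}
    for u in graph:
        for v in graph:
            if u != v and v not in graph[u]:
                complement.setdefault(u, {})[v] = 1
    return complement
```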
<|file_name|>EncoderUserModesComponent.py<|end_file_name|><|fim▁begin|># http://remotescripts.blogspot.com
"""
Track Control User Modes component originally designed for use with the APC40.
Copyright (C) 2010 Hanz Petrov <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import Live
from _Framework.ModeSelectorComponent import ModeSelectorComponent
from _Framework.ButtonElement import ButtonElement
from _Framework.DeviceComponent import DeviceComponent
class EncoderUserModesComponent(ModeSelectorComponent):
' SelectorComponent that assigns encoders to different user functions '
__module__ = __name__
def __init__(self, parent, encoder_modes, param_controls, bank_buttons, mixer, device, encoder_device_modes, encoder_eq_modes): #, mixer, sliders):
assert (len(bank_buttons) == 4)
ModeSelectorComponent.__init__(self)
self._parent = parent
self._encoder_modes = encoder_modes
self._param_controls = param_controls
self._bank_buttons = bank_buttons
self._mixer = mixer
self._device = device
self._encoder_device_modes = encoder_device_modes
self._encoder_eq_modes = encoder_eq_modes
self._mode_index = 0
self._modes_buttons = []
self._user_buttons = []
self._last_mode = 0
def disconnect(self):
ModeSelectorComponent.disconnect(self)
self._parent = None
self._encoder_modes = None
self._param_controls = None
self._bank_buttons = None
self._mixer = None
self._device = None
self._encoder_device_modes = None
self._encoder_eq_modes = None
self._modes_buttons = None
self._user_buttons = None
def on_enabled_changed(self):
pass
def set_mode(self, mode):
assert isinstance(mode, int)
assert (mode in range(self.number_of_modes()))
if (self._mode_index != mode):
self._last_mode = self._mode_index # keep track of previous mode, to allow conditional actions
self._mode_index = mode
self._set_modes()
def set_mode_buttons(self, buttons):
assert isinstance(buttons, (tuple,
type(None)))
for button in self._modes_buttons:
button.remove_value_listener(self._mode_value)
self._modes_buttons = []
if (buttons != None):
for button in buttons:
assert isinstance(button, ButtonElement)
identify_sender = True
button.add_value_listener(self._mode_value, identify_sender)
self._modes_buttons.append(button)
assert (self._mode_index in range(self.number_of_modes()))
def number_of_modes(self):
return 4
def update(self):
pass
def _mode_value(self, value, sender):
assert (len(self._modes_buttons) > 0)
assert isinstance(value, int)
assert isinstance(sender, ButtonElement)
assert (self._modes_buttons.count(sender) == 1)
if ((value != 0) or (not sender.is_momentary())):
self.set_mode(self._modes_buttons.index(sender))
def _set_modes(self):
if self.is_enabled():
assert (self._mode_index in range(self.number_of_modes()))
for index in range(len(self._modes_buttons)):
if (index <= self._mode_index):
self._modes_buttons[index].turn_on()
else:
self._modes_buttons[index].turn_off()
for button in self._modes_buttons:
button.release_parameter()
button.use_default_message()
for control in self._param_controls:
control.release_parameter()
control.use_default_message()
#control.set_needs_takeover(False)
self._encoder_modes.set_enabled(False)
self._encoder_device_modes.set_lock_button(None)
self._encoder_device_modes._alt_device.set_bank_nav_buttons(None, None)
self._encoder_device_modes._alt_device.set_on_off_button(None)
if self._encoder_device_modes._alt_device._parameter_controls != None:
for control in self._encoder_device_modes._alt_device._parameter_controls:
control.release_parameter()
self._encoder_device_modes.set_enabled(False)
self._encoder_eq_modes.set_enabled(False)
self._encoder_eq_modes.set_lock_button(None)
if self._encoder_eq_modes._track_eq != None:
self._encoder_eq_modes._track_eq.set_cut_buttons(None)
if self._encoder_eq_modes._track_eq._gain_controls != None:
for control in self._encoder_eq_modes._track_eq._gain_controls:
control.release_parameter()
if self._encoder_eq_modes._strip != None:
self._encoder_eq_modes._strip.set_send_controls(None)
self._user_buttons = []
if (self._mode_index == 0):
self._encoder_modes.set_enabled(True)
elif (self._mode_index == 1):
self._encoder_device_modes.set_enabled(True)<|fim▁hole|> elif (self._mode_index == 2):
self._encoder_eq_modes.set_enabled(True)
self._encoder_eq_modes.set_controls_and_buttons(self._param_controls, self._modes_buttons)
elif (self._mode_index == 3):
self._encoder_eq_modes._ignore_buttons = True
if self._encoder_eq_modes._track_eq != None:
self._encoder_eq_modes._track_eq._ignore_cut_buttons = True
self._encoder_device_modes._ignore_buttons = True
for button in self._modes_buttons:
self._user_buttons.append(button)
for control in self._param_controls:
control.set_identifier((control.message_identifier() - 9))
control._ring_mode_button.send_value(0)
else:
pass
#self._rebuild_callback()
# local variables:
# tab-width: 4<|fim▁end|> | self._encoder_device_modes.set_controls_and_buttons(self._param_controls, self._modes_buttons)
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>import { ModuleWithProviders } from "@angular/core";
export * from './src/app/services/image-lazy-load.service';
export * from './src/app/services/web-worker.service';
export * from './src/app/directives/image-lazy-load.directive';<|fim▁hole|><|fim▁end|> | export declare class ImageLazyLoadModule {
static forRoot(configuredProviders: Array<any>): ModuleWithProviders;
} |
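A consumer of this module would call `forRoot` once in the root module so the configured providers are registered a single time. A hypothetical wiring (package name and provider entries are assumptions):

```typescript
// Hypothetical root-module usage; package name and providers are made up.
import { NgModule } from "@angular/core";
import { ImageLazyLoadModule } from "ng2-image-lazy-load"; // assumed package name

@NgModule({
    imports: [
        ImageLazyLoadModule.forRoot([
            // e.g. { provide: SomeConfigToken, useValue: { headers: {} } }
        ])
    ]
})
export class AppModule { }
```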
<|file_name|>strategy.go<|end_file_name|><|fim▁begin|>package buildconfiginstantiate
import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/validation/field"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/kubernetes/pkg/api/legacyscheme"
buildapi "github.com/openshift/origin/pkg/build/apis/build"
buildvalidation "github.com/openshift/origin/pkg/build/apis/build/validation"
)
type strategy struct {
runtime.ObjectTyper
}
var Strategy = strategy{legacyscheme.Scheme}
func (strategy) NamespaceScoped() bool {
return true
}
func (strategy) AllowCreateOnUpdate() bool {
return false
}
func (strategy) GenerateName(base string) string {
return base
}
// PrepareForCreate clears fields that are not allowed to be set by end users on creation.<|fim▁hole|>func (strategy) Canonicalize(obj runtime.Object) {
}
// Validate validates a new role.
func (strategy) Validate(ctx apirequest.Context, obj runtime.Object) field.ErrorList {
return buildvalidation.ValidateBuildRequest(obj.(*buildapi.BuildRequest))
}
type binaryStrategy struct {
runtime.ObjectTyper
}
var BinaryStrategy = binaryStrategy{legacyscheme.Scheme}
func (binaryStrategy) NamespaceScoped() bool {
return true
}
func (binaryStrategy) AllowCreateOnUpdate() bool {
return false
}
func (binaryStrategy) GenerateName(base string) string {
return base
}
// PrepareForCreate clears fields that are not allowed to be set by end users on creation.
func (binaryStrategy) PrepareForCreate(ctx apirequest.Context, obj runtime.Object) {
}
// Canonicalize normalizes the object after validation.
func (binaryStrategy) Canonicalize(obj runtime.Object) {
}
// Validate validates a new role.
func (binaryStrategy) Validate(ctx apirequest.Context, obj runtime.Object) field.ErrorList {
// TODO: validate
return nil
}<|fim▁end|> | func (strategy) PrepareForCreate(ctx apirequest.Context, obj runtime.Object) {
}
// Canonicalize normalizes the object after validation. |
<|file_name|>pyfile_transfer.py<|end_file_name|><|fim▁begin|>'''
Created on 10/5/2015
@author: johnPortella
@version: 1.0
'''
import paramiko, os
from ftplib import FTP
#from config_parser import ConfigUtils
class PyFileTransfer(object):
'''
def __init__(self, transfName):
transfConf = ConfigUtils.read_trasnfer_config(transfName)
#hostname
self.__hostname = transfConf['host']
#username
self.__username = transfConf['user']
#password
self.__password = transfConf['password']
#protocol
self.__typeProtocol = transfConf['type']
#so
if transfConf['so'] == 'unix':
self.__SEP = '/'
elif transfConf['so'] == 'win':
self.__SEP = chr(92)
#port
if 'port' in transfConf:
self.__port = transfConf['port']
else:
self.__port = None
#open transfering
if self.__typeProtocol == 'ftp':
if self.__port is None:
self.__port = 21
#open
self.__t = FTP()
self.__t.connect(self.__hostname, self.__port, self.__timeout)
elif self.__typeProtocol == 'sftp':
if self.__port is None:
self.__port = 22
#open
self.__ssh = paramiko.Transport((self.__hostname, self.__port))
def connection(self):
if self.__typeProtocol == 'ftp':
self.__t.login(self.__username, self.__password)
#default directory
self.__defaultDirectory = self.__t.pwd()
elif self.__typeProtocol == 'sftp':
self.__ssh.connect(username = self.__username, password = self.__password)
self.__t = paramiko.SFTPClient.from_transport(self.__ssh)
#default directory
self.__defaultDirectory = None
'''
def __init__(self, typeProtocol='ftp', hostname = 'localhost', so='unix', port = None, timeout = None):
#Protocol
self.__typeProtocol = typeProtocol
#host
self.__hostname = hostname
#so
if so == 'unix':
self.__SEP = '/'
elif so == 'win':
self.__SEP = chr(92)
#timeout
self.__timeout = timeout
#port
if port:
self.__port = port
#open transfering
if self.__typeProtocol == 'ftp':
if not port:
self.__port = 21
#open
self.__t = FTP()
self.__t.connect(self.__hostname, self.__port, self.__timeout)
elif self.__typeProtocol == 'sftp':
if not port:
self.__port = 22
#open
self.__ssh = paramiko.Transport((self.__hostname, self.__port))
def connection(self, username, password):
if self.__typeProtocol == 'ftp':
self.__t.login(username, password)
#default directory
self.__defaultDirectory = self.__t.pwd()
elif self.__typeProtocol == 'sftp':
self.__ssh.connect(username = username, password = password)
self.__t = paramiko.SFTPClient.from_transport(self.__ssh)
self.__t.sock.settimeout(self.__timeout)
#default directory
self.__defaultDirectory = None
def get(self, filename, remoteDirectory=None, localDirectory=None):
if localDirectory is None:
localDirectory = os.path.dirname(os.path.realpath(__file__))
if self.__typeProtocol == 'ftp':
pwdAux = self.__t.pwd()
if remoteDirectory is not None:
self.__t.cwd(remoteDirectory)
localFile = open(os.path.join(localDirectory, filename), 'wb')
self.__t.retrbinary("RETR " + filename, localFile.write)
self.__t.cwd(pwdAux)
localFile.close()
elif self.__typeProtocol == 'sftp':
if remoteDirectory is not None:
self.__t.chdir(remoteDirectory) <|fim▁hole|>
def put(self, filename, remoteDirectory=None, localDirectory=None):
if localDirectory is None:
localDirectory = os.path.dirname(os.path.realpath(__file__))
if self.__typeProtocol == 'ftp':
pwdAux = self.__t.pwd()
if remoteDirectory is not None:
self.__t.cwd(remoteDirectory)
localFile = open(os.path.join(localDirectory, filename), 'rb')
self.__t.storbinary('STOR %s' % filename, localFile)
self.__t.cwd(pwdAux)
localFile.close()
elif self.__typeProtocol == 'sftp':
if remoteDirectory is not None:
self.__t.chdir(remoteDirectory)
self.__t.put(os.path.join(localDirectory, filename), filename)
self.__t.chdir(None)
def disconnect(self):
if self.__typeProtocol == 'ftp':
self.__t.quit()
elif self.__typeProtocol == 'sftp':
self.__t.close()
self.__ssh.close()
def pwd(self):
if self.__typeProtocol == 'ftp':
return self.__t.pwd()
elif self.__typeProtocol == 'sftp':
return self.__t.getcwd()
def cwd(self, remoteDirectory = None):
if self.__typeProtocol == 'ftp':
self.__t.cwd(remoteDirectory)
elif self.__typeProtocol == 'sftp':
self.__t.chdir(remoteDirectory)
def setDefaultDirectory(self):
if self.__typeProtocol == 'ftp':
self.__t.cwd(self.__defaultDirectory)
elif self.__typeProtocol == 'sftp':
self.__t.chdir(None)
def remotePathJoin (self, *paths):
"""Returns separate paths to string"""
if len(paths)== 0:
return None
if len(paths)== 1:
return paths[0]
else:
path = paths[0]
for i in paths[1:]:
path += self.__SEP + i
return path
t = PyFileTransfer('sftp', 'test.rebex.net', 'unix')
t.connection("demo", "password")
#t.get("WinFormClient.png", '/pub/example')
print t.pwd()
t.cwd('pub/example')
print t.pwd()
t.setDefaultDirectory()
print t.pwd()
t.disconnect()<|fim▁end|> | self.__t.get(filename, os.path.join(localDirectory, filename))
self.__t.chdir(None) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
// TODO: Falsely triggers for async/await:
// see https://github.com/rust-lang/rust-clippy/issues/5360
// clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
#![type_length_limit = "2058438"]
#[cfg(test)]
mod tests;<|fim▁hole|>
pub use client::{client_execute, NailgunClientError};
pub use nails::execution::ExitCode;
pub use server::{RawFdExecution, Server};<|fim▁end|> |
mod client;
mod server; |
<|file_name|>render_api.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(missing_docs)]
use std::cell::Cell;
use std::fmt;
use std::marker::PhantomData;
use std::path::PathBuf;
use std::sync::Arc;
use std::u32;
use time::precise_time_ns;
//use crate::api::peek_poke::PeekPoke;
use crate::api::channel::{Sender, single_msg_channel, unbounded_channel};
use crate::api::{ColorF, BuiltDisplayList, IdNamespace, ExternalScrollId, Parameter, BoolParameter};
use crate::api::{FontKey, FontInstanceKey, NativeFontHandle};
use crate::api::{BlobImageData, BlobImageKey, ImageData, ImageDescriptor, ImageKey, Epoch, QualitySettings};
use crate::api::{BlobImageParams, BlobImageRequest, BlobImageResult, AsyncBlobImageRasterizer, BlobImageHandler};
use crate::api::{DocumentId, PipelineId, PropertyBindingId, PropertyBindingKey, ExternalEvent};
use crate::api::{HitTestResult, HitTesterRequest, ApiHitTester, PropertyValue, DynamicProperties};
use crate::api::{SampledScrollOffset, TileSize, NotificationRequest, DebugFlags};
use crate::api::{GlyphDimensionRequest, GlyphIndexRequest, GlyphIndex, GlyphDimensions};
use crate::api::{FontInstanceOptions, FontInstancePlatformOptions, FontVariation, RenderReasons};
use crate::api::DEFAULT_TILE_SIZE;
use crate::api::units::*;
use crate::api_resources::ApiResources;
use crate::glyph_rasterizer::SharedFontInstanceMap;
use crate::scene_builder_thread::{SceneBuilderRequest, SceneBuilderResult};
use crate::intern::InterningMemoryReport;
use crate::profiler::{self, TransactionProfile};
#[repr(C)]
#[derive(Clone, Copy, Debug)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
struct ResourceId(pub u32);
/// Update of a persistent resource in WebRender.
///
/// ResourceUpdate changes keep their effect across display list changes.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub enum ResourceUpdate {
/// See `AddImage`.
AddImage(AddImage),
/// See `UpdateImage`.
UpdateImage(UpdateImage),
/// Delete an existing image resource.
///
/// It is invalid to continue referring to the image key in any display list
/// in the transaction that contains the `DeleteImage` message and subsequent
/// transactions.
DeleteImage(ImageKey),
/// See `AddBlobImage`.
AddBlobImage(AddBlobImage),
/// See `UpdateBlobImage`.
UpdateBlobImage(UpdateBlobImage),
/// Delete existing blob image resource.
DeleteBlobImage(BlobImageKey),
/// See `AddBlobImage::visible_area`.
SetBlobImageVisibleArea(BlobImageKey, DeviceIntRect),
/// See `AddFont`.
AddFont(AddFont),
/// Deletes an already existing font resource.
///
/// It is invalid to continue referring to the font key in any display list
/// in the transaction that contains the `DeleteFont` message and subsequent
/// transactions.
DeleteFont(FontKey),
/// See `AddFontInstance`.
AddFontInstance(AddFontInstance),
/// Deletes an already existing font instance resource.
///
/// It is invalid to continue referring to the font instance in any display
/// list in the transaction that contains the `DeleteFontInstance` message and
/// subsequent transactions.
DeleteFontInstance(FontInstanceKey),
}
impl fmt::Debug for ResourceUpdate {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ResourceUpdate::AddImage(ref i) => f.write_fmt(format_args!(
"ResourceUpdate::AddImage size({:?})",
&i.descriptor.size
)),
ResourceUpdate::UpdateImage(ref i) => f.write_fmt(format_args!(
"ResourceUpdate::UpdateImage size({:?})",
&i.descriptor.size
)),
ResourceUpdate::AddBlobImage(ref i) => f.write_fmt(format_args!(
"ResourceUpdate::AddBlobImage size({:?})",
&i.descriptor.size
)),
ResourceUpdate::UpdateBlobImage(i) => f.write_fmt(format_args!(
"ResourceUpdate::UpdateBlobImage size({:?})",
&i.descriptor.size
)),
ResourceUpdate::DeleteImage(..) => f.write_str("ResourceUpdate::DeleteImage"),
ResourceUpdate::DeleteBlobImage(..) => f.write_str("ResourceUpdate::DeleteBlobImage"),
ResourceUpdate::SetBlobImageVisibleArea(..) => f.write_str("ResourceUpdate::SetBlobImageVisibleArea"),
ResourceUpdate::AddFont(..) => f.write_str("ResourceUpdate::AddFont"),
ResourceUpdate::DeleteFont(..) => f.write_str("ResourceUpdate::DeleteFont"),
ResourceUpdate::AddFontInstance(..) => f.write_str("ResourceUpdate::AddFontInstance"),
ResourceUpdate::DeleteFontInstance(..) => f.write_str("ResourceUpdate::DeleteFontInstance"),
}
}
}
/// Whether to generate a frame, and if so, an id that allows tracking this
/// transaction through the various frame stages.
#[derive(Clone, Debug)]
pub enum GenerateFrame {
/// Generate a frame if something changed.
Yes {
/// An id that allows tracking the frame transaction through the various
/// frame stages. Specified by the caller of generate_frame().
id: u64,
},
/// Don't generate a frame even if something has changed.
No,
}
impl GenerateFrame {
///
pub fn as_bool(&self) -> bool {
match self {
GenerateFrame::Yes { .. } => true,
GenerateFrame::No => false,
}
}
/// Return the frame ID, if a frame is generated.
pub fn id(&self) -> Option<u64> {
match self {
GenerateFrame::Yes { id } => Some(*id),
GenerateFrame::No => None,
}
}
}
/// A Transaction is a group of commands to apply atomically to a document.
///
/// This mechanism ensures that:
/// - no other message can be interleaved between two commands that need to be applied together.
/// - no redundant work is performed if two commands in the same transaction cause the scene or
/// the frame to be rebuilt.
pub struct Transaction {
/// Operations affecting the scene (applied before scene building).
scene_ops: Vec<SceneMsg>,
/// Operations affecting the generation of frames (applied after scene building).
frame_ops: Vec<FrameMsg>,
notifications: Vec<NotificationRequest>,
/// Persistent resource updates to apply as part of this transaction.
pub resource_updates: Vec<ResourceUpdate>,
/// True if the transaction needs the scene building thread's attention.
/// False for things that can skip the scene builder, like APZ changes and
/// async images.
///
/// Before this `Transaction` is converted to a `TransactionMsg`, we look
/// over its contents and set this if we're doing anything the scene builder
/// needs to know about, so this is only a default.
use_scene_builder_thread: bool,
/// Whether to generate a frame, and if so, an id that allows tracking this
/// transaction through the various frame stages. Specified by the caller of
/// generate_frame().
generate_frame: GenerateFrame,
/// Set to true in order to force re-rendering even if WebRender can't internally
/// detect that something has changed.
pub invalidate_rendered_frame: bool,
low_priority: bool,
///
pub render_reasons: RenderReasons,
}
impl Transaction {
/// Constructor.
pub fn new() -> Self {
Transaction {
scene_ops: Vec::new(),
frame_ops: Vec::new(),
resource_updates: Vec::new(),
notifications: Vec::new(),
use_scene_builder_thread: true,
generate_frame: GenerateFrame::No,
invalidate_rendered_frame: false,
low_priority: false,
render_reasons: RenderReasons::empty(),
}
}
/// Marks this transaction to allow it to skip going through the scene builder
/// thread.
///
/// This is useful to avoid jank in transaction associated with animated
/// property updates, panning and zooming.
///
/// Note that transactions that skip the scene builder thread can race ahead of
/// transactions that don't skip it.
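    ///
    /// # Example
    ///
    /// A minimal sketch (the scroll id and sampled offsets are assumed to be
    /// in scope):
    ///
    /// ```ignore
    /// let mut txn = Transaction::new();
    /// txn.skip_scene_builder();
    /// txn.set_scroll_offsets(scroll_id, sampled_offsets);
    /// ```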
pub fn skip_scene_builder(&mut self) {
self.use_scene_builder_thread = false;
}
/// Marks this transaction to enforce going through the scene builder thread.
pub fn use_scene_builder_thread(&mut self) {
self.use_scene_builder_thread = true;
}
/// Returns true if the transaction has no effect.
pub fn is_empty(&self) -> bool {
!self.generate_frame.as_bool() &&
!self.invalidate_rendered_frame &&
self.scene_ops.is_empty() &&
self.frame_ops.is_empty() &&
self.resource_updates.is_empty() &&
self.notifications.is_empty()
}
/// Update a pipeline's epoch.
pub fn update_epoch(&mut self, pipeline_id: PipelineId, epoch: Epoch) {
// We track epochs before and after scene building.
// This one will be applied to the pending scene right away:
self.scene_ops.push(SceneMsg::UpdateEpoch(pipeline_id, epoch));
// And this one will be applied to the currently built scene at the end
// of the transaction (potentially long after the scene_ops one).
self.frame_ops.push(FrameMsg::UpdateEpoch(pipeline_id, epoch));
// We could avoid the duplication here by storing the epoch updates in a
// separate array and let the render backend schedule the updates at the
// proper times, but it wouldn't make things simpler.
}
/// Sets the root pipeline.
///
/// # Examples
///
/// ```
/// # use webrender::api::{PipelineId};
/// # use webrender::api::units::{DeviceIntSize};
/// # use webrender::render_api::{RenderApiSender, Transaction};
/// # fn example() {
/// let pipeline_id = PipelineId(0, 0);
/// let mut txn = Transaction::new();
/// txn.set_root_pipeline(pipeline_id);
/// # }
/// ```
pub fn set_root_pipeline(&mut self, pipeline_id: PipelineId) {
self.scene_ops.push(SceneMsg::SetRootPipeline(pipeline_id));
}
/// Removes data associated with a pipeline from the internal data structures.
/// If the specified `pipeline_id` is for the root pipeline, the root pipeline
/// is reset back to `None`.
pub fn remove_pipeline(&mut self, pipeline_id: PipelineId) {
self.scene_ops.push(SceneMsg::RemovePipeline(pipeline_id));
}
/// Supplies a new frame to WebRender.
///
/// Non-blocking, it notifies a worker process which processes the display list.
///
    /// Note: Scrolling doesn't require its own frame.
///
/// Arguments:
///
/// * `epoch`: The unique Frame ID, monotonically increasing.
/// * `background`: The background color of this pipeline.
/// * `viewport_size`: The size of the viewport for this frame.
/// * `pipeline_id`: The ID of the pipeline that is supplying this display list.
/// * `display_list`: The root Display list used in this frame.
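    ///
    /// # Example
    ///
    /// A sketch only; `epoch`, `pipeline_id`, `viewport_size` and a display
    /// list built elsewhere are assumed to be in scope:
    ///
    /// ```ignore
    /// txn.set_display_list(
    ///     epoch,
    ///     Some(ColorF::WHITE),
    ///     viewport_size,
    ///     (pipeline_id, built_display_list),
    /// );
    /// ```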
pub fn set_display_list(
&mut self,
epoch: Epoch,
background: Option<ColorF>,
viewport_size: LayoutSize,
(pipeline_id, mut display_list): (PipelineId, BuiltDisplayList),
) {
display_list.set_send_time_ns(precise_time_ns());
self.scene_ops.push(
SceneMsg::SetDisplayList {
display_list,
epoch,
pipeline_id,
background,
viewport_size,
}
);
}
/// Add a set of persistent resource updates to apply as part of this transaction.
pub fn update_resources(&mut self, mut resources: Vec<ResourceUpdate>) {
self.resource_updates.append(&mut resources);
}
    // Note: Gecko uses this to get notified when a transaction that contains
    // potentially long blob rasterization or scene build is ready to be rendered,
    // so that the tab-switching integration can react adequately when tab
    // switching takes too long. For this use case what matters is that the
    // notification doesn't fire before scene building and blob rasterization.
    /// Trigger a notification at a certain stage of the rendering pipeline.
    ///
    /// Note that notification requests are skipped during serialization, so it is
    /// best to use them for synchronization purposes and not for things that could
    /// affect WebRender's state.
pub fn notify(&mut self, event: NotificationRequest) {
self.notifications.push(event);
}
/// Setup the output region in the framebuffer for a given document.
pub fn set_document_view(
&mut self,
device_rect: DeviceIntRect,
) {
window_size_sanity_check(device_rect.size());
self.scene_ops.push(
SceneMsg::SetDocumentView {
device_rect,
},
);
}
    /// Set multiple scroll offsets with generations to the node identified by
    /// the given external scroll id; the scroll offsets are relative to the
    /// pre-scrolled offset for the scrolling layer.
pub fn set_scroll_offsets(
&mut self,
id: ExternalScrollId,
sampled_scroll_offsets: Vec<SampledScrollOffset>,
) {
self.frame_ops.push(FrameMsg::SetScrollOffsets(id, sampled_scroll_offsets));
}
/// Set the current quality / performance settings for this document.
pub fn set_quality_settings(&mut self, settings: QualitySettings) {
self.scene_ops.push(SceneMsg::SetQualitySettings { settings });
}
///
pub fn set_is_transform_async_zooming(&mut self, is_zooming: bool, animation_id: PropertyBindingId) {
self.frame_ops.push(FrameMsg::SetIsTransformAsyncZooming(is_zooming, animation_id));
}
/// Generate a new frame. When it's done and a RenderNotifier has been set
/// in `webrender::Renderer`, [new_frame_ready()][notifier] gets called.
/// Note that the notifier is called even if the frame generation was a
/// no-op; the arguments passed to `new_frame_ready` will provide information
    /// as to what happened.
///
/// [notifier]: trait.RenderNotifier.html#tymethod.new_frame_ready
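    ///
    /// # Example
    ///
    /// A minimal sketch; the frame id (`42` here) is an arbitrary
    /// caller-chosen value:
    ///
    /// ```ignore
    /// let mut txn = Transaction::new();
    /// txn.generate_frame(42, RenderReasons::empty());
    /// ```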
pub fn generate_frame(&mut self, id: u64, reasons: RenderReasons) {
self.generate_frame = GenerateFrame::Yes{ id };
self.render_reasons |= reasons;
}
    /// Invalidate the rendered frame. This ensures that the frame will be
    /// rendered during the next frame generation. WebRender may skip frame
    /// rendering if there is no update, but some cases need to force rendering:
    /// - Content of an image is updated by reusing the same ExternalImageId.
/// - Platform requests it if pixels become stale (like wakeup from standby).
pub fn invalidate_rendered_frame(&mut self, reasons: RenderReasons) {
self.invalidate_rendered_frame = true;
self.render_reasons |= reasons
}
/// Reset the list of animated property bindings that should be used to resolve
/// bindings in the current display list.
pub fn reset_dynamic_properties(&mut self) {
self.frame_ops.push(FrameMsg::ResetDynamicProperties);
}
/// Add to the list of animated property bindings that should be used to resolve
/// bindings in the current display list.
pub fn append_dynamic_properties(&mut self, properties: DynamicProperties) {
self.frame_ops.push(FrameMsg::AppendDynamicProperties(properties));
}
/// Add to the list of animated property bindings that should be used to
/// resolve bindings in the current display list. This is a convenience method
/// so the caller doesn't have to figure out all the dynamic properties before
/// setting them on the transaction but can do them incrementally.
pub fn append_dynamic_transform_properties(&mut self, transforms: Vec<PropertyValue<LayoutTransform>>) {
self.frame_ops.push(FrameMsg::AppendDynamicTransformProperties(transforms));
}
/// Consumes this object and just returns the frame ops.
pub fn get_frame_ops(self) -> Vec<FrameMsg> {
self.frame_ops
}
fn finalize(self, document_id: DocumentId) -> Box<TransactionMsg> {
Box::new(TransactionMsg {
document_id,
scene_ops: self.scene_ops,
frame_ops: self.frame_ops,
resource_updates: self.resource_updates,
notifications: self.notifications,
use_scene_builder_thread: self.use_scene_builder_thread,
generate_frame: self.generate_frame,
invalidate_rendered_frame: self.invalidate_rendered_frame,
low_priority: self.low_priority,
blob_rasterizer: None,
blob_requests: Vec::new(),
rasterized_blobs: Vec::new(),
profile: TransactionProfile::new(),
render_reasons: self.render_reasons,
})
}
/// See `ResourceUpdate::AddImage`.
pub fn add_image(
&mut self,
key: ImageKey,
descriptor: ImageDescriptor,
data: ImageData,
tiling: Option<TileSize>,
) {
self.resource_updates.push(ResourceUpdate::AddImage(AddImage {
key,
descriptor,
data,
tiling,
}));
}
/// See `ResourceUpdate::UpdateImage`.
pub fn update_image(
&mut self,
key: ImageKey,
descriptor: ImageDescriptor,
data: ImageData,
dirty_rect: &ImageDirtyRect,
) {
self.resource_updates.push(ResourceUpdate::UpdateImage(UpdateImage {
key,
descriptor,
data,
dirty_rect: *dirty_rect,
}));
}
/// See `ResourceUpdate::DeleteImage`.
pub fn delete_image(&mut self, key: ImageKey) {
self.resource_updates.push(ResourceUpdate::DeleteImage(key));
}
/// See `ResourceUpdate::AddBlobImage`.
pub fn add_blob_image(
&mut self,
key: BlobImageKey,
descriptor: ImageDescriptor,
data: Arc<BlobImageData>,
visible_rect: DeviceIntRect,
tile_size: Option<TileSize>,
) {
self.resource_updates.push(
ResourceUpdate::AddBlobImage(AddBlobImage {
key,
descriptor,
data,
visible_rect,
tile_size: tile_size.unwrap_or(DEFAULT_TILE_SIZE),
})
);
}
/// See `ResourceUpdate::UpdateBlobImage`.
pub fn update_blob_image(
&mut self,
key: BlobImageKey,
descriptor: ImageDescriptor,
data: Arc<BlobImageData>,
visible_rect: DeviceIntRect,
dirty_rect: &BlobDirtyRect,
) {
self.resource_updates.push(
ResourceUpdate::UpdateBlobImage(UpdateBlobImage {
key,
descriptor,
data,
visible_rect,
dirty_rect: *dirty_rect,
})
);
}
/// See `ResourceUpdate::DeleteBlobImage`.
pub fn delete_blob_image(&mut self, key: BlobImageKey) {
self.resource_updates.push(ResourceUpdate::DeleteBlobImage(key));
}
/// See `ResourceUpdate::SetBlobImageVisibleArea`.
pub fn set_blob_image_visible_area(&mut self, key: BlobImageKey, area: DeviceIntRect) {
self.resource_updates.push(ResourceUpdate::SetBlobImageVisibleArea(key, area));
}
/// See `ResourceUpdate::AddFont`.
pub fn add_raw_font(&mut self, key: FontKey, bytes: Vec<u8>, index: u32) {
self.resource_updates
.push(ResourceUpdate::AddFont(AddFont::Raw(key, Arc::new(bytes), index)));
}
/// See `ResourceUpdate::AddFont`.
pub fn add_native_font(&mut self, key: FontKey, native_handle: NativeFontHandle) {
self.resource_updates
.push(ResourceUpdate::AddFont(AddFont::Native(key, native_handle)));
}
/// See `ResourceUpdate::DeleteFont`.
pub fn delete_font(&mut self, key: FontKey) {
self.resource_updates.push(ResourceUpdate::DeleteFont(key));
}
/// See `ResourceUpdate::AddFontInstance`.
pub fn add_font_instance(
&mut self,
key: FontInstanceKey,
font_key: FontKey,
glyph_size: f32,
options: Option<FontInstanceOptions>,
platform_options: Option<FontInstancePlatformOptions>,
variations: Vec<FontVariation>,
) {
self.resource_updates
.push(ResourceUpdate::AddFontInstance(AddFontInstance {
key,
font_key,
glyph_size,
options,
platform_options,
variations,
}));
}
/// See `ResourceUpdate::DeleteFontInstance`.
pub fn delete_font_instance(&mut self, key: FontInstanceKey) {
self.resource_updates.push(ResourceUpdate::DeleteFontInstance(key));
}
/// A hint that this transaction can be processed at a lower priority. High-
/// priority transactions can jump ahead of regular-priority transactions,
/// but both high- and regular-priority transactions are processed in order
/// relative to other transactions of the same priority.
pub fn set_low_priority(&mut self, low_priority: bool) {
self.low_priority = low_priority;
}
/// Returns whether this transaction is marked as low priority.
pub fn is_low_priority(&self) -> bool {
self.low_priority
}
}
///
pub struct DocumentTransaction {
///
pub document_id: DocumentId,
///
pub transaction: Transaction,
}
/// Represents a transaction in the format sent through the channel.
pub struct TransactionMsg {
///
pub document_id: DocumentId,
/// Changes that require re-building the scene.
pub scene_ops: Vec<SceneMsg>,
/// Changes to animated properties that do not require re-building the scene.
pub frame_ops: Vec<FrameMsg>,
/// Updates to resources that persist across display lists.
pub resource_updates: Vec<ResourceUpdate>,
/// Whether to trigger frame building and rendering if something has changed.
pub generate_frame: GenerateFrame,
/// Whether to force frame building and rendering even if no changes are internally
/// observed.
pub invalidate_rendered_frame: bool,
/// Whether to enforce that this transaction go through the scene builder.
pub use_scene_builder_thread: bool,
///
pub low_priority: bool,
/// Handlers to notify at certain points of the pipeline.
pub notifications: Vec<NotificationRequest>,
///
pub blob_rasterizer: Option<Box<dyn AsyncBlobImageRasterizer>>,
///
pub blob_requests: Vec<BlobImageParams>,
///
pub rasterized_blobs: Vec<(BlobImageRequest, BlobImageResult)>,
/// Collect various data along the rendering pipeline to display it in the embedded profiler.
pub profile: TransactionProfile,
/// Keep track of who asks rendering to happen.
pub render_reasons: RenderReasons,
}
impl fmt::Debug for TransactionMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "threaded={}, genframe={:?}, invalidate={}, low_priority={}",
self.use_scene_builder_thread,
self.generate_frame,
self.invalidate_rendered_frame,
self.low_priority,
).unwrap();
for scene_op in &self.scene_ops {
writeln!(f, "\t\t{:?}", scene_op).unwrap();
}
for frame_op in &self.frame_ops {
writeln!(f, "\t\t{:?}", frame_op).unwrap();
}
for resource_update in &self.resource_updates {
writeln!(f, "\t\t{:?}", resource_update).unwrap();
}
Ok(())
}
}
impl TransactionMsg {
/// Returns true if this transaction has no effect.
pub fn is_empty(&self) -> bool {
!self.generate_frame.as_bool() &&
!self.invalidate_rendered_frame &&
self.scene_ops.is_empty() &&
self.frame_ops.is_empty() &&
self.resource_updates.is_empty() &&
self.notifications.is_empty()
}
}
/// Creates an image resource with provided parameters.
///
/// Must be matched with a `DeleteImage` at some point to prevent memory leaks.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub struct AddImage {
/// A key to identify the image resource.
pub key: ImageKey,
/// Properties of the image.
pub descriptor: ImageDescriptor,
/// The pixels of the image.
pub data: ImageData,
/// An optional tiling scheme to apply when storing the image's data
    /// on the GPU. Applies to both the width and height of the tiles.
///
    /// Note that WebRender may internally choose to tile large images
/// even if this member is set to `None`.
pub tiling: Option<TileSize>,
}
/// Updates an already existing image resource.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub struct UpdateImage {
    /// The key identifying the image resource to update.
pub key: ImageKey,
/// Properties of the image.
pub descriptor: ImageDescriptor,
/// The pixels of the image.
pub data: ImageData,
/// An optional dirty rect that lets WebRender optimize the amount of
/// data to transfer to the GPU.
///
/// The data provided must still represent the entire image.
pub dirty_rect: ImageDirtyRect,
}
/// Creates a blob-image resource with provided parameters.
///
/// Must be matched with a `DeleteImage` at some point to prevent memory leaks.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub struct AddBlobImage {
/// A key to identify the blob-image resource.
pub key: BlobImageKey,
/// Properties of the image.
pub descriptor: ImageDescriptor,
/// The blob-image's serialized commands.
pub data: Arc<BlobImageData>,
/// The portion of the plane in the blob-image's internal coordinate
/// system that is stretched to fill the image display item.
///
/// Unlike regular images, blob images are not limited in size. The
/// top-left corner of their internal coordinate system is also not
    /// necessarily at (0, 0).
/// This means that blob images can be updated to insert/remove content
/// in any direction to support panning and zooming.
pub visible_rect: DeviceIntRect,
/// The blob image's tile size to apply when rasterizing the blob-image
/// and when storing its rasterized data on the GPU.
    /// Applies to both the width and height of the tiles.
///
/// All blob images are tiled.
pub tile_size: TileSize,
}
/// Updates an already existing blob-image resource.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub struct UpdateBlobImage {
    /// The key identifying the blob-image resource to update.
pub key: BlobImageKey,
/// Properties of the image.
pub descriptor: ImageDescriptor,
/// The blob-image's serialized commands.
pub data: Arc<BlobImageData>,
/// See `AddBlobImage::visible_rect`.
pub visible_rect: DeviceIntRect,
/// An optional dirty rect that lets WebRender optimize the amount of
    /// data to rasterize and transfer to the GPU.
pub dirty_rect: BlobDirtyRect,
}
/// Creates a font resource.
///
/// Must be matched with a corresponding `ResourceUpdate::DeleteFont` at some point to prevent
/// memory leaks.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub enum AddFont {
///
Raw(FontKey, Arc<Vec<u8>>, u32),
///
Native(FontKey, NativeFontHandle),
}
/// Creates a font instance resource.
///
/// Must be matched with a corresponding `DeleteFontInstance` at some point
/// to prevent memory leaks.
#[derive(Clone)]
#[cfg_attr(any(feature = "serde"), derive(Deserialize, Serialize))]
pub struct AddFontInstance {
/// A key to identify the font instance.
pub key: FontInstanceKey,
/// The font resource's key.
pub font_key: FontKey,
/// Glyph size in app units.
pub glyph_size: f32,
///
pub options: Option<FontInstanceOptions>,
///
pub platform_options: Option<FontInstancePlatformOptions>,
///
pub variations: Vec<FontVariation>,
}
/// Scene messages affect building the scene (applied before frame generation).
pub enum SceneMsg {
///
UpdateEpoch(PipelineId, Epoch),
///
SetRootPipeline(PipelineId),
///
RemovePipeline(PipelineId),
///
SetDisplayList {
///
display_list: BuiltDisplayList,
///
epoch: Epoch,
///
pipeline_id: PipelineId,
///
background: Option<ColorF>,
///
viewport_size: LayoutSize,
},
///
SetDocumentView {
///
device_rect: DeviceIntRect,
},
/// Set the current quality / performance configuration for this document.
SetQualitySettings {
/// The set of available quality / performance config values.
settings: QualitySettings,
},
}
/// Frame messages affect frame generation (applied after building the scene).
pub enum FrameMsg {
///
UpdateEpoch(PipelineId, Epoch),
///
HitTest(WorldPoint, Sender<HitTestResult>),
///
RequestHitTester(Sender<Arc<dyn ApiHitTester>>),
///
SetScrollOffsets(ExternalScrollId, Vec<SampledScrollOffset>),
///
ResetDynamicProperties,
///
AppendDynamicProperties(DynamicProperties),
///
AppendDynamicTransformProperties(Vec<PropertyValue<LayoutTransform>>),
///
SetIsTransformAsyncZooming(bool, PropertyBindingId),
}
impl fmt::Debug for SceneMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match *self {
SceneMsg::UpdateEpoch(..) => "SceneMsg::UpdateEpoch",
SceneMsg::SetDisplayList { .. } => "SceneMsg::SetDisplayList",
SceneMsg::RemovePipeline(..) => "SceneMsg::RemovePipeline",
SceneMsg::SetDocumentView { .. } => "SceneMsg::SetDocumentView",
SceneMsg::SetRootPipeline(..) => "SceneMsg::SetRootPipeline",
SceneMsg::SetQualitySettings { .. } => "SceneMsg::SetQualitySettings",
})
}
}
impl fmt::Debug for FrameMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match *self {
FrameMsg::UpdateEpoch(..) => "FrameMsg::UpdateEpoch",
FrameMsg::HitTest(..) => "FrameMsg::HitTest",
FrameMsg::RequestHitTester(..) => "FrameMsg::RequestHitTester",
FrameMsg::SetScrollOffsets(..) => "FrameMsg::SetScrollOffsets",
FrameMsg::ResetDynamicProperties => "FrameMsg::ResetDynamicProperties",
FrameMsg::AppendDynamicProperties(..) => "FrameMsg::AppendDynamicProperties",
FrameMsg::AppendDynamicTransformProperties(..) => "FrameMsg::AppendDynamicTransformProperties",
FrameMsg::SetIsTransformAsyncZooming(..) => "FrameMsg::SetIsTransformAsyncZooming",
})
}
}
bitflags!{
/// Bit flags for WR stages to store in a capture.
// Note: capturing `FRAME` without `SCENE` is not currently supported.
pub struct CaptureBits: u8 {
///
const SCENE = 0x1;
///
const FRAME = 0x2;
///
const TILE_CACHE = 0x4;
///
const EXTERNAL_RESOURCES = 0x8;
}
}
bitflags!{
/// Mask for clearing caches in debug commands.
pub struct ClearCache: u8 {
///
const IMAGES = 0b1;
///
const GLYPHS = 0b10;
///
const GLYPH_DIMENSIONS = 0b100;
///
const RENDER_TASKS = 0b1000;
///
const TEXTURE_CACHE = 0b10000;
/// Clear render target pool
const RENDER_TARGETS = 0b100000;
}
}
/// Information about a loaded capture of each document
/// that is returned by `RenderBackend`.
#[derive(Clone, Debug)]
pub struct CapturedDocument {
///
pub document_id: DocumentId,
///
pub root_pipeline_id: Option<PipelineId>,
}
/// Update of the state of built-in debugging facilities.
#[derive(Clone)]
pub enum DebugCommand {
/// Sets the provided debug flags.
SetFlags(DebugFlags),
/// Configure if dual-source blending is used, if available.
EnableDualSourceBlending(bool),
/// Save a capture of all the documents state.
SaveCapture(PathBuf, CaptureBits),
/// Load a capture of all the documents state.
LoadCapture(PathBuf, Option<(u32, u32)>, Sender<CapturedDocument>),
/// Start capturing a sequence of scene/frame changes.
StartCaptureSequence(PathBuf, CaptureBits),
/// Stop capturing a sequence of scene/frame changes.
StopCaptureSequence,
/// Clear cached resources, forcing them to be re-uploaded from templates.
ClearCaches(ClearCache),
/// Enable/disable native compositor usage
EnableNativeCompositor(bool),
    /// Sets the maximum number of existing batches to visit before creating a new one.
SetBatchingLookback(u32),
/// Invalidate GPU cache, forcing the update from the CPU mirror.
InvalidateGpuCache,
    /// Causes the scene builder to pause for a given number of milliseconds each time it
/// processes a transaction.
SimulateLongSceneBuild(u32),
/// Set an override tile size to use for picture caches
SetPictureTileSize(Option<DeviceIntSize>),
}
/// Message sent by the `RenderApi` to the render backend thread.
pub enum ApiMsg {
/// Adds a new document namespace.
CloneApi(Sender<IdNamespace>),
/// Adds a new document namespace.
CloneApiByClient(IdNamespace),
/// Adds a new document with given initial size.
AddDocument(DocumentId, DeviceIntSize),
/// A message targeted at a particular document.
UpdateDocuments(Vec<Box<TransactionMsg>>),
/// Flush from the caches anything that isn't necessary, to free some memory.
MemoryPressure,
/// Collects a memory report.
ReportMemory(Sender<Box<MemoryReport>>),
/// Change debugging options.
DebugCommand(DebugCommand),
/// Message from the scene builder thread.
SceneBuilderResult(SceneBuilderResult),
}
impl fmt::Debug for ApiMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match *self {
ApiMsg::CloneApi(..) => "ApiMsg::CloneApi",
ApiMsg::CloneApiByClient(..) => "ApiMsg::CloneApiByClient",
ApiMsg::AddDocument(..) => "ApiMsg::AddDocument",
ApiMsg::UpdateDocuments(..) => "ApiMsg::UpdateDocuments",
ApiMsg::MemoryPressure => "ApiMsg::MemoryPressure",
ApiMsg::ReportMemory(..) => "ApiMsg::ReportMemory",
ApiMsg::DebugCommand(..) => "ApiMsg::DebugCommand",
ApiMsg::SceneBuilderResult(..) => "ApiMsg::SceneBuilderResult",
})
}
}
/// Allows the API to communicate with WebRender.
///
/// This object is created along with the `Renderer` and its main use from a
/// user perspective is to create one or several `RenderApi` objects.
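///
/// A usage sketch, assuming a `sender` obtained from `Renderer` initialization:
///
/// ```ignore
/// let mut api = sender.create_api();
/// let document_id = api.add_document(DeviceIntSize::new(1024, 768));
/// ```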
pub struct RenderApiSender {
api_sender: Sender<ApiMsg>,
scene_sender: Sender<SceneBuilderRequest>,
low_priority_scene_sender: Sender<SceneBuilderRequest>,
blob_image_handler: Option<Box<dyn BlobImageHandler>>,
shared_font_instances: SharedFontInstanceMap,
}
impl RenderApiSender {
/// Used internally by the `Renderer`.
pub fn new(
api_sender: Sender<ApiMsg>,
scene_sender: Sender<SceneBuilderRequest>,
low_priority_scene_sender: Sender<SceneBuilderRequest>,
blob_image_handler: Option<Box<dyn BlobImageHandler>>,
shared_font_instances: SharedFontInstanceMap,
) -> Self {
RenderApiSender {
api_sender,
scene_sender,
low_priority_scene_sender,
blob_image_handler,
shared_font_instances,<|fim▁hole|> }
}
/// Creates a new resource API object with a dedicated namespace.
pub fn create_api(&self) -> RenderApi {
let (sync_tx, sync_rx) = single_msg_channel();
let msg = ApiMsg::CloneApi(sync_tx);
self.api_sender.send(msg).expect("Failed to send CloneApi message");
let namespace_id = sync_rx.recv().expect("Failed to receive CloneApi reply");
RenderApi {
api_sender: self.api_sender.clone(),
scene_sender: self.scene_sender.clone(),
low_priority_scene_sender: self.low_priority_scene_sender.clone(),
namespace_id,
next_id: Cell::new(ResourceId(0)),
resources: ApiResources::new(
self.blob_image_handler.as_ref().map(|handler| handler.create_similar()),
self.shared_font_instances.clone(),
),
}
}
/// Creates a new resource API object with a dedicated namespace.
/// Namespace id is allocated by client.
///
    /// This function can only be used when `RendererOptions::namespace_alloc_by_client` is true.
    /// When the option is true, `create_api()` must not be used, to prevent namespace id conflicts.
pub fn create_api_by_client(&self, namespace_id: IdNamespace) -> RenderApi {
let msg = ApiMsg::CloneApiByClient(namespace_id);
self.api_sender.send(msg).expect("Failed to send CloneApiByClient message");
RenderApi {
api_sender: self.api_sender.clone(),
scene_sender: self.scene_sender.clone(),
low_priority_scene_sender: self.low_priority_scene_sender.clone(),
namespace_id,
next_id: Cell::new(ResourceId(0)),
resources: ApiResources::new(
self.blob_image_handler.as_ref().map(|handler| handler.create_similar()),
self.shared_font_instances.clone(),
),
}
}
}
/// The main entry point to interact with WebRender.
pub struct RenderApi {
api_sender: Sender<ApiMsg>,
scene_sender: Sender<SceneBuilderRequest>,
low_priority_scene_sender: Sender<SceneBuilderRequest>,
namespace_id: IdNamespace,
next_id: Cell<ResourceId>,
resources: ApiResources,
}
impl RenderApi {
/// Returns the namespace ID used by this API object.
pub fn get_namespace_id(&self) -> IdNamespace {
self.namespace_id
}
///
pub fn create_sender(&self) -> RenderApiSender {
RenderApiSender::new(
self.api_sender.clone(),
self.scene_sender.clone(),
self.low_priority_scene_sender.clone(),
self.resources.blob_image_handler.as_ref().map(|handler| handler.create_similar()),
self.resources.get_shared_font_instances(),
)
}
/// Add a document to the WebRender instance.
///
/// Instances can manage one or several documents (using the same render backend thread).
/// Each document will internally correspond to a single scene, and scenes are made of
/// one or several pipelines.
pub fn add_document(&self, initial_size: DeviceIntSize) -> DocumentId {
let new_id = self.next_unique_id();
self.add_document_with_id(initial_size, new_id)
}
/// See `add_document`
pub fn add_document_with_id(&self,
initial_size: DeviceIntSize,
id: u32) -> DocumentId {
window_size_sanity_check(initial_size);
let document_id = DocumentId::new(self.namespace_id, id);
// We send this message to both the render backend and the scene builder instead of having
        // the scene builder thread forward it to the render backend as we do elsewhere. This is because
// some transactions can skip the scene builder thread and we want to avoid them arriving before
// the render backend knows about the existence of the corresponding document id.
// It may not be necessary, though.
self.api_sender.send(
ApiMsg::AddDocument(document_id, initial_size)
).unwrap();
self.scene_sender.send(
SceneBuilderRequest::AddDocument(document_id, initial_size)
).unwrap();
document_id
}
/// Delete a document.
pub fn delete_document(&self, document_id: DocumentId) {
self.low_priority_scene_sender.send(
SceneBuilderRequest::DeleteDocument(document_id)
).unwrap();
}
/// Generate a new font key
pub fn generate_font_key(&self) -> FontKey {
let new_id = self.next_unique_id();
FontKey::new(self.namespace_id, new_id)
}
/// Generate a new font instance key
pub fn generate_font_instance_key(&self) -> FontInstanceKey {
let new_id = self.next_unique_id();
FontInstanceKey::new(self.namespace_id, new_id)
}
/// Gets the dimensions for the supplied glyph keys
///
    /// Note: the internal texture cache doesn't store
    /// 'empty' textures (height or width = 0).
    /// This means that glyph dimensions, e.g. for spaces (' '), will mostly be `None`.
pub fn get_glyph_dimensions(
&self,
key: FontInstanceKey,
glyph_indices: Vec<GlyphIndex>,
) -> Vec<Option<GlyphDimensions>> {
let (sender, rx) = single_msg_channel();
let msg = SceneBuilderRequest::GetGlyphDimensions(GlyphDimensionRequest {
key,
glyph_indices,
sender
});
self.low_priority_scene_sender.send(msg).unwrap();
rx.recv().unwrap()
}
/// Gets the glyph indices for the supplied string. These
/// can be used to construct GlyphKeys.
pub fn get_glyph_indices(&self, key: FontKey, text: &str) -> Vec<Option<u32>> {
let (sender, rx) = single_msg_channel();
let msg = SceneBuilderRequest::GetGlyphIndices(GlyphIndexRequest {
key,
text: text.to_string(),
sender,
});
self.low_priority_scene_sender.send(msg).unwrap();
rx.recv().unwrap()
}
/// Creates an `ImageKey`.
pub fn generate_image_key(&self) -> ImageKey {
let new_id = self.next_unique_id();
ImageKey::new(self.namespace_id, new_id)
}
/// Creates a `BlobImageKey`.
pub fn generate_blob_image_key(&self) -> BlobImageKey {
BlobImageKey(self.generate_image_key())
}
/// A Gecko-specific notification mechanism to get some code executed on the
/// `Renderer`'s thread, mostly replaced by `NotificationHandler`. You should
/// probably use the latter instead.
pub fn send_external_event(&self, evt: ExternalEvent) {
let msg = SceneBuilderRequest::ExternalEvent(evt);
self.low_priority_scene_sender.send(msg).unwrap();
}
/// Notify WebRender that now is a good time to flush caches and release
/// as much memory as possible.
pub fn notify_memory_pressure(&self) {
self.api_sender.send(ApiMsg::MemoryPressure).unwrap();
}
/// Synchronously requests memory report.
pub fn report_memory(&self, _ops: malloc_size_of::MallocSizeOfOps) -> MemoryReport {
let (tx, rx) = single_msg_channel();
self.api_sender.send(ApiMsg::ReportMemory(tx)).unwrap();
*rx.recv().unwrap()
}
/// Update debugging flags.
pub fn set_debug_flags(&self, flags: DebugFlags) {
let cmd = DebugCommand::SetFlags(flags);
self.api_sender.send(ApiMsg::DebugCommand(cmd)).unwrap();
}
/// Stop RenderBackend's task until shut down
pub fn stop_render_backend(&self) {
self.low_priority_scene_sender.send(SceneBuilderRequest::StopRenderBackend).unwrap();
}
/// Shut the WebRender instance down.
pub fn shut_down(&self, synchronously: bool) {
if synchronously {
let (tx, rx) = single_msg_channel();
self.low_priority_scene_sender.send(SceneBuilderRequest::ShutDown(Some(tx))).unwrap();
rx.recv().unwrap();
} else {
self.low_priority_scene_sender.send(SceneBuilderRequest::ShutDown(None)).unwrap();
}
}
/// Create a new unique key that can be used for
/// animated property bindings.
pub fn generate_property_binding_key<T: Copy>(&self) -> PropertyBindingKey<T> {
let new_id = self.next_unique_id();
PropertyBindingKey {
id: PropertyBindingId {
namespace: self.namespace_id,
uid: new_id,
},
_phantom: PhantomData,
}
}
#[inline]
fn next_unique_id(&self) -> u32 {
let ResourceId(id) = self.next_id.get();
self.next_id.set(ResourceId(id + 1));
id
}
// For use in Wrench only
#[doc(hidden)]
pub fn send_message(&self, msg: ApiMsg) {
self.api_sender.send(msg).unwrap();
}
/// Creates a transaction message from a single frame message.
fn frame_message(&self, msg: FrameMsg, document_id: DocumentId) -> Box<TransactionMsg> {
Box::new(TransactionMsg {
document_id,
scene_ops: Vec::new(),
frame_ops: vec![msg],
resource_updates: Vec::new(),
notifications: Vec::new(),
generate_frame: GenerateFrame::No,
invalidate_rendered_frame: false,
use_scene_builder_thread: false,
low_priority: false,
blob_rasterizer: None,
blob_requests: Vec::new(),
rasterized_blobs: Vec::new(),
profile: TransactionProfile::new(),
render_reasons: RenderReasons::empty(),
})
}
/// A helper method to send document messages.
fn send_frame_msg(&self, document_id: DocumentId, msg: FrameMsg) {
// This assertion fails on Servo use-cases, because it creates different
// `RenderApi` instances for layout and compositor.
//assert_eq!(document_id.0, self.namespace_id);
self.api_sender
.send(ApiMsg::UpdateDocuments(vec![self.frame_message(msg, document_id)]))
.unwrap()
}
/// Send a transaction to WebRender.
pub fn send_transaction(&mut self, document_id: DocumentId, transaction: Transaction) {
let mut transaction = transaction.finalize(document_id);
self.resources.update(&mut transaction);
if transaction.generate_frame.as_bool() {
transaction.profile.start_time(profiler::API_SEND_TIME);
transaction.profile.start_time(profiler::TOTAL_FRAME_CPU_TIME);
}
if transaction.use_scene_builder_thread {
let sender = if transaction.low_priority {
&mut self.low_priority_scene_sender
} else {
&mut self.scene_sender
};
sender.send(SceneBuilderRequest::Transactions(vec![transaction])).unwrap();
} else {
self.api_sender.send(ApiMsg::UpdateDocuments(vec![transaction])).unwrap();
}
}
    /// Does a hit test on display items in the specified document, at the given
    /// point. The vector of hit results will contain all display items that
    /// match, ordered from front to back.
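    ///
    /// # Example
    ///
    /// A sketch, assuming `api` and `document_id` are already set up:
    ///
    /// ```ignore
    /// let result = api.hit_test(document_id, WorldPoint::new(100.0, 200.0));
    /// ```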
pub fn hit_test(&self,
document_id: DocumentId,
point: WorldPoint,
) -> HitTestResult {
let (tx, rx) = single_msg_channel();
self.send_frame_msg(
document_id,
FrameMsg::HitTest(point, tx)
);
rx.recv().unwrap()
}
/// Synchronously request an object that can perform fast hit testing queries.
pub fn request_hit_tester(&self, document_id: DocumentId) -> HitTesterRequest {
let (tx, rx) = single_msg_channel();
self.send_frame_msg(
document_id,
FrameMsg::RequestHitTester(tx)
);
HitTesterRequest { rx }
}
// Some internal scheduling magic that leaked into the API.
// Buckle up and see APZUpdater.cpp for more info about what this is about.
#[doc(hidden)]
pub fn wake_scene_builder(&self) {
self.scene_sender.send(SceneBuilderRequest::WakeUp).unwrap();
}
/// Block until a round-trip to the scene builder thread has completed. This
/// ensures that any transactions (including ones deferred to the scene
/// builder thread) have been processed.
pub fn flush_scene_builder(&self) {
let (tx, rx) = single_msg_channel();
self.low_priority_scene_sender.send(SceneBuilderRequest::Flush(tx)).unwrap();
rx.recv().unwrap(); // Block until done.
}
/// Save a capture of the current frame state for debugging.
pub fn save_capture(&self, path: PathBuf, bits: CaptureBits) {
let msg = ApiMsg::DebugCommand(DebugCommand::SaveCapture(path, bits));
self.send_message(msg);
}
/// Load a capture of the current frame state for debugging.
pub fn load_capture(&self, path: PathBuf, ids: Option<(u32, u32)>) -> Vec<CapturedDocument> {
// First flush the scene builder otherwise async scenes might clobber
// the capture we are about to load.
self.flush_scene_builder();
let (tx, rx) = unbounded_channel();
let msg = ApiMsg::DebugCommand(DebugCommand::LoadCapture(path, ids, tx));
self.send_message(msg);
let mut documents = Vec::new();
while let Ok(captured_doc) = rx.recv() {
documents.push(captured_doc);
}
documents
}
/// Start capturing a sequence of frames.
pub fn start_capture_sequence(&self, path: PathBuf, bits: CaptureBits) {
let msg = ApiMsg::DebugCommand(DebugCommand::StartCaptureSequence(path, bits));
self.send_message(msg);
}
/// Stop capturing sequences of frames.
pub fn stop_capture_sequence(&self) {
let msg = ApiMsg::DebugCommand(DebugCommand::StopCaptureSequence);
self.send_message(msg);
}
/// Update the state of builtin debugging facilities.
pub fn send_debug_cmd(&self, cmd: DebugCommand) {
let msg = ApiMsg::DebugCommand(cmd);
self.send_message(msg);
}
    /// Update an instance-global parameter.
pub fn set_parameter(&mut self, parameter: Parameter) {
if let Parameter::Bool(BoolParameter::Multithreading, enabled) = parameter {
self.resources.enable_multithreading(enabled);
}
let _ = self.low_priority_scene_sender.send(
SceneBuilderRequest::SetParameter(parameter)
);
}
}
impl Drop for RenderApi {
fn drop(&mut self) {
let msg = SceneBuilderRequest::ClearNamespace(self.namespace_id);
let _ = self.low_priority_scene_sender.send(msg);
}
}
fn window_size_sanity_check(size: DeviceIntSize) {
// Anything bigger than this will crash later when attempting to create
// a render task.
use crate::api::MAX_RENDER_TASK_SIZE;
if size.width > MAX_RENDER_TASK_SIZE || size.height > MAX_RENDER_TASK_SIZE {
panic!("Attempting to create a {}x{} window/document", size.width, size.height);
}
}
/// Collection of heap sizes, in bytes.
/// cbindgen:derive-eq=false
/// cbindgen:derive-ostream=false
#[repr(C)]
#[allow(missing_docs)]
#[derive(AddAssign, Clone, Debug, Default)]
pub struct MemoryReport {
//
// CPU Memory.
//
pub clip_stores: usize,
pub gpu_cache_metadata: usize,
pub gpu_cache_cpu_mirror: usize,
pub render_tasks: usize,
pub hit_testers: usize,
pub fonts: usize,
pub weak_fonts: usize,
pub images: usize,
pub rasterized_blobs: usize,
pub shader_cache: usize,
pub interning: InterningMemoryReport,
pub display_list: usize,
pub upload_staging_memory: usize,
pub swgl: usize,
//
// GPU memory.
//
pub gpu_cache_textures: usize,
pub vertex_data_textures: usize,
pub render_target_textures: usize,
pub picture_tile_textures: usize,
pub atlas_textures: usize,
pub standalone_textures: usize,
pub texture_cache_structures: usize,
pub depth_target_textures: usize,
pub texture_upload_pbos: usize,
pub swap_chain: usize,
pub render_texture_hosts: usize,
pub upload_staging_textures: usize,
}<|fim▁end|> | |
<|file_name|>api.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import json
import operator
from functools import reduce
from django.core.exceptions import PermissionDenied
from django.db.models import ProtectedError, Q
from django.forms.models import modelform_factory
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django.views.generic import View
from pootle.core.http import (
JsonResponse, JsonResponseBadRequest, JsonResponseForbidden,
JsonResponseNotFound
)
class JSONDecodeError(ValueError):
pass
class APIView(View):
"""View to implement internal RESTful APIs.
Based on djangbone https://github.com/af/djangbone
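
    Usage sketch (hypothetical subclass; ``User`` and the field names shown
    are illustrative only)::

        class UserAPIView(APIView):
            model = User
            fields = ('id', 'username', 'email')
            page_size = 20
            search_fields = ('username', 'email')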
"""
# Model on which this view operates. Setting this is required
model = None
# Base queryset for accessing data. If `None`, model's default manager will
# be used
base_queryset = None
# Set this to restrict the view to a subset of the available methods
restrict_to_methods = None
# Field names to be included
fields = ()
# Individual forms to use for each method. By default it'll auto-populate
# model forms built using `self.model` and `self.fields`
add_form_class = None
edit_form_class = None
# Permission classes implement logic to determine whether the request
# should be permitted. Empty list means no permission-checking.
permission_classes = []
# Tuple of sensitive field names that will be excluded from any serialized
# responses
sensitive_field_names = ('password', 'pw')
# Set to an integer to enable GET pagination
page_size = None
# HTTP GET parameter to use for accessing pages
page_param_name = 'p'
# HTTP GET parameter to use for search queries
search_param_name = 'q'
# Field names in which searching will be allowed
search_fields = None
@property
def allowed_methods(self):
methods = [m for m in self.http_method_names if hasattr(self, m)]
if self.restrict_to_methods is not None:
            restricted_to = [m.lower() for m in self.restrict_to_methods]
            methods = [m for m in methods if m in restricted_to]
return methods
def __init__(self, *args, **kwargs):
if self.model is None:
raise ValueError('No model class specified.')
self.pk_field_name = self.model._meta.pk.name
if self.base_queryset is None:
self.base_queryset = self.model._default_manager
self._init_fields()
self._init_forms()
return super(APIView, self).__init__(*args, **kwargs)
def _init_fields(self):
if len(self.fields) < 1:
form = self.add_form_class or self.edit_form_class
if form is not None:
self.fields = form._meta.fields
else: # Assume all fields by default
                self.fields = tuple(f.name for f in self.model._meta.fields)
        self.serialize_fields = tuple(f for f in self.fields if
                                      f not in self.sensitive_field_names)
def _init_forms(self):
if 'post' in self.allowed_methods and self.add_form_class is None:
self.add_form_class = modelform_factory(self.model,
fields=self.fields)
if 'put' in self.allowed_methods and self.edit_form_class is None:
self.edit_form_class = modelform_factory(self.model,
fields=self.fields)
@cached_property
def request_data(self):
try:
return json.loads(self.request.body)
except ValueError:
raise JSONDecodeError
def get_permissions(self):
"""Returns permission handler instances required for a particular view."""
return [permission() for permission in self.permission_classes]
def check_permissions(self, request):
"""Checks whether the view is allowed to process the request or not.
"""
for permission in self.get_permissions():
if not permission.has_permission(request, self):
raise PermissionDenied
def check_object_permissions(self, request, obj):
for permission in self.get_permissions():
if not permission.has_object_permission(request, self, obj):
raise PermissionDenied
def handle_exception(self, exc):
"""Handles response exceptions."""
if isinstance(exc, Http404):
return JsonResponseNotFound({
'msg': 'Not found',
})<|fim▁hole|>
if isinstance(exc, PermissionDenied):
return JsonResponseForbidden({
'msg': 'Permission denied.',
})
if isinstance(exc, JSONDecodeError):
return JsonResponseBadRequest({
'msg': 'Invalid JSON data',
})
raise
def dispatch(self, request, *args, **kwargs):
try:
self.check_permissions(request)
if request.method.lower() in self.allowed_methods:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
return handler(request, *args, **kwargs)
except Exception as exc:
return self.handle_exception(exc)
def get(self, request, *args, **kwargs):
"""GET handler."""
if self.kwargs.get(self.pk_field_name, None) is not None:
object = self.get_object()
return JsonResponse(self.object_to_values(object))
return self.get_collection(request, *args, **kwargs)
def get_object(self):
"""Returns a single model instance."""
obj = get_object_or_404(
self.base_queryset, pk=self.kwargs[self.pk_field_name],
)
self.check_object_permissions(self.request, obj)
return obj
def get_collection(self, request, *args, **kwargs):
"""Retrieve a full collection."""
return JsonResponse(self.qs_to_values(self.base_queryset))
def get_form_kwargs(self):
kwargs = {
'data': self.request_data,
}
if (self.pk_field_name in self.kwargs and
self.kwargs[self.pk_field_name] is not None):
kwargs.update({
'instance': self.get_object(),
})
return kwargs
def post(self, request, *args, **kwargs):
"""Creates a new model instance.
The form to be used can be customized by setting
`self.add_form_class`. By default a model form will be used with
the fields from `self.fields`.
"""
form = self.add_form_class(**self.get_form_kwargs())
if form.is_valid():
new_object = form.save()
return JsonResponse(self.object_to_values(new_object))
return self.form_invalid(form)
def put(self, request, *args, **kwargs):
"""Update the current model."""
if self.pk_field_name not in self.kwargs:
return self.status_msg('PUT is not supported for collections',
status=405)
form = self.edit_form_class(**self.get_form_kwargs())
if form.is_valid():
updated_object = form.save()
return JsonResponse(self.object_to_values(updated_object))
return self.form_invalid(form)
def delete(self, request, *args, **kwargs):
"""Delete the model and return its JSON representation."""
if self.pk_field_name not in kwargs:
return self.status_msg('DELETE is not supported for collections',
status=405)
obj = self.get_object()
try:
obj.delete()
return JsonResponse({})
except ProtectedError as e:
            return self.status_msg(e.args[0], status=405)
def object_to_values(self, object):
"""Convert an object to values for serialization."""
return {
field: getattr(object, field) for field in self.serialize_fields
}
def qs_to_values(self, queryset):
"""Convert a queryset to values for further serialization.
An array of objects in `models` and the total object count in
`count` is returned.
"""
search_keyword = self.request.GET.get(self.search_param_name, None)
if search_keyword is not None:
filter_by = self.get_search_filter(search_keyword)
queryset = queryset.filter(filter_by)
values = queryset.values(*self.serialize_fields)
# Process pagination options if they are enabled
if isinstance(self.page_size, int):
try:
page_param = self.request.GET.get(self.page_param_name, 1)
page_number = int(page_param)
offset = (page_number - 1) * self.page_size
except ValueError:
offset = 0
values = values[offset:offset+self.page_size]
return_values = {
'models': list(values),
'count': queryset.count(),
}
return return_values
def get_search_filter(self, keyword):
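        # Illustrative only: with search_fields = ('name', 'email') and
        # keyword 'foo', this builds
        # Q(name__icontains='foo') | Q(email__icontains='foo').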
search_fields = getattr(self, 'search_fields', None)
if search_fields is None:
search_fields = self.fields # Assume all fields
field_queries = list(
zip(map(lambda x: '%s__icontains' % x, search_fields),
(keyword,)*len(search_fields))
)
lookups = [Q(x) for x in field_queries]
return reduce(operator.or_, lookups)
def status_msg(self, msg, status=400):
return JsonResponse({'msg': msg}, status=status)
def form_invalid(self, form):
return JsonResponse({'errors': form.errors}, status=400)<|fim▁end|> | |
<|file_name|>bom_textdoc.py<|end_file_name|><|fim▁begin|># This file is part of Invenio.
# Copyright (C) 2007, 2008, 2009, 2010, 2011, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibObject Module providing BibObject prividing features for documents containing text (not necessarily as the main part of the content)"""
import os
import re
from datetime import datetime
from invenio.config import CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES
from invenio.legacy.bibdocfile.api import BibDoc, InvenioBibDocFileError
from invenio.legacy.dbquery import run_sql
from invenio.ext.logging import register_exception
_RE_PERFORM_OCR = re.compile(CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES)
class BibTextDoc(BibDoc):
def get_text(self, version=None):
"""
@param version: the requested version. If not set, the latest version
will be used.
@type version: integer
@return: the textual content corresponding to the specified version<|fim▁hole|> """
if version is None:
version = self.get_latest_version()
if self.has_text(version):
return open(os.path.join(self.basedir, '.text;%i' % version)).read()
else:
return ""
def is_ocr_required(self):
"""
        Return True if this document requires OCR in order to extract text from it.
"""
for bibrec_link in self.bibrec_links:
if _RE_PERFORM_OCR.match(bibrec_link['docname']):
return True
return False
def get_text_path(self, version=None):
"""
@param version: the requested version. If not set, the latest version
will be used.
@type version: int
@return: the full path to the textual content corresponding to the specified version
of the document.
@rtype: string
"""
if version is None:
version = self.get_latest_version()
if self.has_text(version):
return os.path.join(self.basedir, '.text;%i' % version)
else:
return ""
def extract_text(self, version=None, perform_ocr=False, ln='en'):
"""
Try what is necessary to extract the textual information of a document.
@param version: the version of the document for which text is required.
If not specified the text will be retrieved from the last version.
@type version: integer
@param perform_ocr: whether to perform OCR.
@type perform_ocr: bool
@param ln: a two letter language code to give as a hint to the OCR
procedure.
@type ln: string
@raise InvenioBibDocFileError: in case of error.
@note: the text is extracted and cached for later use. Use L{get_text}
to retrieve it.
"""
raise RuntimeError("Text extraction is not implemented.")
def pdf_a_p(self):
"""
@return: True if this document contains a PDF in PDF/A format.
@rtype: bool"""
return self.has_flag('PDF/A', 'pdf')
def has_text(self, require_up_to_date=False, version=None):
"""
Return True if the text of this document has already been extracted.
@param require_up_to_date: if True check the text was actually
extracted after the most recent format of the given version.
@type require_up_to_date: bool
@param version: a version for which the text should have been
extracted. If not specified the latest version is considered.
@type version: integer
@return: True if the text has already been extracted.
@rtype: bool
"""
if version is None:
version = self.get_latest_version()
if os.path.exists(os.path.join(self.basedir, '.text;%i' % version)):
if not require_up_to_date:
return True
else:
docfiles = self.list_version_files(version)
text_md = datetime.fromtimestamp(os.path.getmtime(os.path.join(self.basedir, '.text;%i' % version)))
for docfile in docfiles:
if text_md <= docfile.md:
return False
return True
return False
def __repr__(self):
return 'BibTextDoc(%s, %s, %s)' % (repr(self.id), repr(self.doctype), repr(self.human_readable))
def supports(doctype, extensions):
    return doctype == "Fulltext" or reduce(lambda x, y: x or y.startswith(".pdf") or y.startswith(".ps"), extensions, False)
def create_instance(docid=None, doctype='Main', human_readable=False, # pylint: disable=W0613
initial_data = None):
return BibTextDoc(docid=docid, human_readable=human_readable,
initial_data = initial_data)<|fim▁end|> | of the document.
@rtype: string |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for tumuli project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""<|fim▁hole|>
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tumuli.settings")
application = get_wsgi_application()<|fim▁end|> | |
<|file_name|>a.js<|end_file_name|><|fim▁begin|><|fim▁hole|>print('this is a');
print(__FILE__, __LINE__, __DIR__);
load('./b.js'); // can't simply load b from the same directory, because engine.get(FILENAME) is unchanged
<|file_name|>cfgs-on-items.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: --cfg fooA --cfg fooB
// fooA AND !bar
#[cfg(fooA, not(bar))]
fn foo1() -> int { 1 }
// !fooA AND !bar
#[cfg(not(fooA), not(bar))]
fn foo2() -> int { 2 }
// fooC OR (fooB AND !bar)
#[cfg(fooC)]
#[cfg(fooB, not(bar))]
fn foo2() -> int { 3 }<|fim▁hole|>// fooA AND bar
#[cfg(fooA, bar)]
fn foo3() -> int { 2 }
// !(fooA AND bar)
#[cfg(not(fooA, bar))]
fn foo3() -> int { 3 }
pub fn main() {
assert_eq!(1, foo1());
assert_eq!(3, foo2());
assert_eq!(3, foo3());
}<|fim▁end|> | |
<|file_name|>transform_assignable_fragment_spreads_in_updatable_queries.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::{NamedItem, WithLocation};
use graphql_ir::{
associated_data_impl, Directive, FragmentDefinition, FragmentSpread, OperationDefinition,<|fim▁hole|>use intern::string_key::{Intern, StringKey};
use lazy_static::lazy_static;
use schema::{SDLSchema, Schema, Type};
use std::sync::Arc;
lazy_static! {
static ref UPDATABLE_DIRECTIVE: StringKey = "updatable".intern();
pub static ref ASSIGNABLE_DIRECTIVE_FOR_TYPEGEN: StringKey = "__assignable".intern();
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Copy)]
pub enum TypeConditionInfo {
Abstract,
Concrete { concrete_type: StringKey },
}
associated_data_impl!(TypeConditionInfo);
/// Transform all of the updatable queries and fragments in a program by adding
/// an internal-only directive on every fragment spread containing
/// TypeConditionInfo in the associated data.
///
/// This directive is then consumed by the typegen step (which does not have
/// access to the schema) to determine how to create the setter for a given linked
/// field.
///
/// The value that is passed to the setter of a singular linked field is the union of:
/// - null
/// - { __typename: 'ConcreteTypeName', __id: string, $fragmentRefs: spreadRef }
/// for each spread of a concrete type
/// - { __isFragmentName: string, __id: string, $fragmentRefs: spreadRef }
/// for each spread of an abstract type
///
/// For a plural linked field, replace null with [] and the unions with an array of
/// the union of those objects.
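///
/// As an illustration (the type and fragment names are hypothetical), spreads
/// of a concrete `User` fragment and an abstract `Actor` fragment in a
/// singular linked field produce a setter accepting roughly:
///
/// ```ignore
/// null
///   | { __typename: 'User', __id: string, $fragmentRefs: UserFragmentRef }
///   | { __isActor: string, __id: string, $fragmentRefs: ActorFragmentRef }
/// ```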
pub fn transform_assignable_fragment_spreads_in_updatable_queries(program: &Program) -> Program {
let mut transform = AssignableFragmentSpreadForUpdatable { program };
transform
.transform_program(program)
.replace_or_else(|| program.clone())
}
struct AssignableFragmentSpreadForUpdatable<'s> {
program: &'s Program,
}
impl<'s> Transformer for AssignableFragmentSpreadForUpdatable<'s> {
const NAME: &'static str = "AssignableFragmentTransformForUpdatable";
const VISIT_ARGUMENTS: bool = false;
const VISIT_DIRECTIVES: bool = false;
fn transform_operation(
&mut self,
operation: &OperationDefinition,
) -> Transformed<OperationDefinition> {
if operation.directives.named(*UPDATABLE_DIRECTIVE).is_some() {
self.default_transform_operation(operation)
} else {
Transformed::Keep
}
}
fn transform_fragment(
&mut self,
operation: &FragmentDefinition,
) -> Transformed<FragmentDefinition> {
if operation.directives.named(*UPDATABLE_DIRECTIVE).is_some() {
self.default_transform_fragment(operation)
} else {
Transformed::Keep
}
}
fn transform_fragment_spread(
&mut self,
fragment_spread: &FragmentSpread,
) -> Transformed<Selection> {
let fragment_definition = self
.program
.fragment(fragment_spread.fragment.item)
.expect("Expected fragment to exist.");
let spread = fragment_spread.clone();
Transformed::Replace(Selection::FragmentSpread(Arc::new(FragmentSpread {
directives: vec![get_directive(
fragment_definition.type_condition,
&self.program.schema,
)],
..spread
})))
}
}
fn get_directive(type_condition: Type, schema: &SDLSchema) -> Directive {
Directive {
name: WithLocation::generated(*ASSIGNABLE_DIRECTIVE_FOR_TYPEGEN),
arguments: vec![],
data: Some(Box::new(get_associated_data(type_condition, schema))),
}
}
fn get_associated_data(type_condition: Type, schema: &SDLSchema) -> TypeConditionInfo {
if type_condition.is_abstract_type() {
TypeConditionInfo::Abstract
} else {
TypeConditionInfo::Concrete {
concrete_type: schema.get_type_name(type_condition),
}
}
}<|fim▁end|> | Program, Selection, Transformed, Transformer,
}; |
<|file_name|>cpu.rs<|end_file_name|><|fim▁begin|>use anyhow::{anyhow, Result};
use cnx::text::{Attributes, Text};
use cnx::widgets::{Widget, WidgetStream};
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::time::Duration;
use tokio::time;
use tokio_stream::wrappers::IntervalStream;
use tokio_stream::StreamExt;
/// Represents CPU widget used to show current CPU consumption
pub struct Cpu {
attr: Attributes,
cpu_data: CpuData,
render: Option<Box<dyn Fn(u64) -> String>>,
}
impl Cpu {
/// Creates a new [`Cpu`] widget.
///
    /// # Arguments
///
/// * `attr` - Represents `Attributes` which controls properties like
/// `Font`, foreground and background color etc.
///
/// * `render` - We use the closure to control the way output is
/// displayed in the bar. `u64` represents the current CPU usage
/// in percentage.
///
/// # Examples
///
/// ```
/// # #[macro_use]
/// # extern crate cnx;
/// #
/// # use cnx::*;
/// # use cnx::text::*;
/// # use cnx_contrib::widgets::cpu::*;
/// # use anyhow::Result;
/// #
/// # fn run() -> Result<()> {
/// let attr = Attributes {
/// font: Font::new("SourceCodePro 21"),
/// fg_color: Color::white(),
/// bg_color: None,
/// padding: Padding::new(8.0, 8.0, 0.0, 0.0),
/// };
///
/// let mut cnx = Cnx::new(Position::Top);
/// cnx.add_widget(Cpu::new(attr, None)?);
/// # Ok(())
/// # }
/// # fn main() { run().unwrap(); }
/// ```
pub fn new(attr: Attributes, render: Option<Box<dyn Fn(u64) -> String>>) -> Result<Self> {
let cpu_data = CpuData::get_values()?;
Ok(Cpu {
attr,
cpu_data,
render,
})
}
fn tick(&mut self) -> Result<Vec<Text>> {
let cpu_data = CpuData::get_values()?;
// https://github.com/jaor/xmobar/blob/61d075d3c275366c3344d59c058d7dd0baf21ef2/src/Xmobar/Plugins/Monitors/Cpu.hs#L128
let previous = &self.cpu_data;
let current = cpu_data;
let diff_total = (current.user_time - previous.user_time)
+ (current.nice_time - previous.nice_time)
+ (current.system_time - previous.system_time)
+ (current.idle_time - previous.idle_time)
+ (current.iowait_time - previous.iowait_time)
+ (current.total_time - previous.total_time);
let percentage = match diff_total {
0 => 0.0,
_ => (current.total_time - previous.total_time) as f64 / diff_total as f64,
};
let cpu_usage = (percentage * 100.0) as u64;
let text = self
.render
.as_ref()
.map_or(format!("{} %", cpu_usage), |x| (x)(cpu_usage));
self.cpu_data = current;
let texts = vec![Text {
attr: self.attr.clone(),
text,
stretch: false,
markup: true,<|fim▁hole|> }
}
struct CpuData {
user_time: i64,
nice_time: i64,
system_time: i64,
idle_time: i64,
total_time: i64,
iowait_time: i64,
}
impl CpuData {
fn get_values() -> Result<CpuData> {
// https://www.kernel.org/doc/Documentation/filesystems/proc.txt
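        // Typical first line of /proc/stat (numbers are illustrative):
        //   cpu  74608 2520 24433 1117073 6176 0 1053 0 0 0
        // fields: user nice system idle iowait irq softirq steal guest guest_nice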
let file = File::open("/proc/stat")?;
let mut cpu_line = String::new();
let mut reader = BufReader::new(file);
reader.read_line(&mut cpu_line)?;
let val: Vec<&str> = cpu_line
.split(' ')
.filter(|item| item != &"cpu" && !item.is_empty())
.collect();
let mut cpu_data = CpuData {
user_time: 0,
nice_time: 0,
system_time: 0,
idle_time: 0,
total_time: 0,
iowait_time: 0,
};
match val[..] {
[ref user, ref nice, ref system, ref idle, ref iowait, ..] => {
let user_time = user.parse()?;
let nice_time = nice.parse()?;
let system_time = system.parse()?;
let idle_time = idle.parse()?;
let iowait_time = iowait.parse()?;
cpu_data.user_time = user_time;
cpu_data.nice_time = nice_time;
cpu_data.system_time = system_time;
cpu_data.idle_time = idle_time;
cpu_data.iowait_time = iowait_time;
cpu_data.total_time = user_time + nice_time + system_time;
}
_ => return Err(anyhow!("Missing data in /proc/stat")),
}
Ok(cpu_data)
}
}
impl Widget for Cpu {
fn into_stream(mut self: Box<Self>) -> Result<WidgetStream> {
let ten_seconds = Duration::from_secs(10);
let interval = time::interval(ten_seconds);
let stream = IntervalStream::new(interval).map(move |_| self.tick());
Ok(Box::pin(stream))
}
}<|fim▁end|> | }];
Ok(texts) |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod route; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from aiohttp import web
from aiohttp_session import get_session, SESSION_KEY as SESSION_COOKIE_NAME
from aioweb.middleware.csrf.templatetags import CsrfTag, CsrfRawTag
from aioweb.util import awaitable
from aioweb.modules.template.backends.jinja2 import APP_KEY as JINJA_APP_KEY
import random, string
from hashlib import sha256
CSRF_FIELD_NAME = 'csrftoken'
CSRF_SESSION_NAME = 'csrf_token'
CSRF_HEADER_NAME = 'X-Csrf-Token'
CSRF_COOKIE_NAME = 'Csrf-Token'
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."
CSRF_LENGTH = 128
CSRF_SALT_LENGTH = 6
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_TOKEN_SEPARATOR = '-'
def generate_csrf_secret():
return ''.join([random.choice(CSRF_ALLOWED_CHARS) for c in range(CSRF_LENGTH)])
def generate_salt():
return ''.join([random.choice(CSRF_ALLOWED_CHARS) for c in range(CSRF_SALT_LENGTH)])
async def get_secret(request):
"""
    Returns the per-session CSRF secret used to build and validate tokens.
    The secret is an alphanumeric value; a new one is created if not already set.
"""
session = await get_session(request)
if CSRF_SESSION_NAME in session and session[CSRF_SESSION_NAME]:
return session[CSRF_SESSION_NAME]
return await set_secret(request)
def make_token(salt, secret):
return "{}{}{}".format(salt, CSRF_TOKEN_SEPARATOR,
sha256("{}{}{}".format(salt, CSRF_TOKEN_SEPARATOR, secret).encode()).hexdigest())
async def get_token(request):
salt = generate_salt()
secret = await get_secret(request)
return make_token(salt, secret)
async def set_secret(request):
session = await get_session(request)
session[CSRF_SESSION_NAME] = generate_csrf_secret()
return session[CSRF_SESSION_NAME]
def validate_token(token, secret):
    salt, _hashed = token.split(CSRF_TOKEN_SEPARATOR, maxsplit=1)
return token == make_token(salt, secret)
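# Round-trip sketch (hypothetical values): a token has the shape
# "<salt><separator><sha256 hex>", so
# validate_token(make_token(generate_salt(), secret), secret) is always True
# for the secret currently stored in the session.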
async def middleware(app, handler):
async def middleware_handler(request):
setattr(request, 'csrf_token', await get_token(request))
try:
response = await awaitable(handler(request))
except web.HTTPException as e:
raise e
return response
return middleware_handler
def setup(app):
app[JINJA_APP_KEY].add_extension(CsrfTag)
app[JINJA_APP_KEY].add_extension(CsrfRawTag)
async def pre_dispatch(request, controller, actionName):
reason = None
check_ok = True
if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
action = getattr(controller, actionName)
if not getattr(action, 'csrf_disabled', False):
check_ok = False
token = request.headers.get(CSRF_HEADER_NAME)
if not token:
data = await request.post()
token = data.get(CSRF_FIELD_NAME)
if token:
if validate_token(token, await get_secret(request)):
check_ok = True
else:
reason = REASON_BAD_TOKEN<|fim▁hole|> else:
reason = REASON_NO_CSRF_COOKIE
if not check_ok:
raise web.HTTPForbidden(reason=reason)<|fim▁end|> | |
<|file_name|>cvedbsync.py<|end_file_name|><|fim▁begin|>"""Update vulnerability sources."""
from selinon import StoragePool
from f8a_worker.base import BaseTask<|fim▁hole|>from f8a_worker.workers import CVEcheckerTask
class CVEDBSyncTask(BaseTask):
"""Update vulnerability sources."""
def components_to_scan(self, previous_sync_timestamp, only_already_scanned):
"""Get EPV that were recently updated in OSS Index, so they can contain new vulnerabilities.
Get components (e:p:v) that were recently (since previous_sync_timestamp) updated
in OSS Index, which means that they can contain new vulnerabilities.
:param previous_sync_timestamp: timestamp of previous check
:param only_already_scanned: include already scanned components only
        :return: list of dicts with 'ecosystem', 'name' and 'version' keys
"""
# TODO: reduce cyclomatic complexity
to_scan = []
rdb = StoragePool.get_connected_storage('BayesianPostgres')
for ecosystem in ['nuget']:
ecosystem_solver = get_ecosystem_solver(self.storage.get_ecosystem(ecosystem),
with_parser=OSSIndexDependencyParser())
self.log.debug("Retrieving new %s vulnerabilities from OSS Index", ecosystem)
ossindex_updated_packages = CVEcheckerTask.\
query_ossindex_vulnerability_fromtill(ecosystem=ecosystem,
from_time=previous_sync_timestamp)
for ossindex_updated_package in ossindex_updated_packages:
if Ecosystem.by_name(rdb.session, ecosystem).is_backed_by(EcosystemBackend.maven):
package_name = "{g}:{n}".format(g=ossindex_updated_package['group'],
n=ossindex_updated_package['name'])
else:
package_name = ossindex_updated_package['name']
package_affected_versions = set()
for vulnerability in ossindex_updated_package.get('vulnerabilities', []):
for version_string in vulnerability.get('versions', []):
try:
resolved_versions = ecosystem_solver.\
solve(["{} {}".format(package_name, version_string)],
all_versions=True)
except Exception:
self.log.exception("Failed to resolve %r for %s:%s", version_string,
ecosystem, package_name)
continue
resolved_versions = resolved_versions.get(package_name, [])
if only_already_scanned:
already_scanned_versions =\
[ver for ver in resolved_versions if
self.storage.get_analysis_count(ecosystem, package_name, ver) > 0]
package_affected_versions.update(already_scanned_versions)
else:
package_affected_versions.update(resolved_versions)
for version in package_affected_versions:
to_scan.append({
'ecosystem': ecosystem,
'name': package_name,
'version': version
})
msg = "Components to be {prefix}scanned for vulnerabilities: {components}".\
format(prefix="re-" if only_already_scanned else "",
components=to_scan)
self.log.info(msg)
return to_scan
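    # Illustrative return value of components_to_scan (hypothetical package):
    #   [{'ecosystem': 'nuget', 'name': 'Example.Package', 'version': '1.0.0'}]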
def execute(self, arguments):
"""Start the task.
:param arguments: optional argument 'only_already_scanned' to run only
on already analysed packages
:return: EPV dict describing which packages should be analysed
"""
only_already_scanned = arguments.pop('only_already_scanned', True) if arguments else True
ignore_modification_time = (arguments.pop('ignore_modification_time', False)
if arguments else False)
CVEcheckerTask.update_victims_cve_db_on_s3()
self.log.debug('Updating sync associated metadata')
s3 = StoragePool.get_connected_storage('S3VulnDB')
previous_sync_timestamp = s3.update_sync_date()
if ignore_modification_time:
previous_sync_timestamp = 0
# get components which might have new vulnerabilities since previous sync
to_scan = self.components_to_scan(previous_sync_timestamp, only_already_scanned)
return {'modified': to_scan}<|fim▁end|> | from f8a_worker.enums import EcosystemBackend
from f8a_worker.models import Ecosystem
from f8a_worker.solver import get_ecosystem_solver, OSSIndexDependencyParser |
<|file_name|>finalGraph.py<|end_file_name|><|fim▁begin|>import sys, math, os
import matplotlib.pyplot as plt
def main():
# Check that there's at least one argument
if len(sys.argv) < 2:
print("Usage python {} <file1> [<file2> ...]".format(sys.argv[0]))
return 1
# Automatically detect if decayed
if "decayed" in sys.argv[1]:
plotDecayed = True
else:
plotDecayed = False
# Read input file
fil = "finalGraph.in"
if os.path.isfile(fil):
with open(fil, "r") as fread:
lstyles = fread.readline().strip().split()
labs = []
for line in fread:
labs.append(line.strip())
lowZ = 27 # Lowest z value to represent
# Read "species.dat" and store all the values in lists
species = "../../data/species.dat"
atomicNum = []; atomicMass = []; namesZ = {}<|fim▁hole|> lnlst = line.split()
# Correct special names
if lnlst[1] == "d" or lnlst[2] == "0":
lnlst[1] = "h"
# Now relate positions with atomic numbers, atomic masses, and names
zNum = int(lnlst[0]) - int(lnlst[2])
atomicNum.append(zNum)
atomicMass.append(int(lnlst[0]))
namesZ[lnlst[1]] = zNum
# Read all initial solar values
solar = "../../data/solarVals.dat"
solarValues = {}
with open(solar, "r") as fread:
for line in fread:
lnlst = line.split()
isotName = lnlst[0] + lnlst[2]
# Add mass fraction value per atomic number
key = namesZ[lnlst[0]]; val = float(lnlst[1])*float(lnlst[2])
solarValues[key] = solarValues.get(key, 0) + val
# Go file by file
numDens = []
for archivo in sys.argv[1:]:
# Open file for reading
dens = []
fread = open(archivo, "r")
# Each line has mass, temperature, rho, radiat
# and elements in number fraction
newline = None
for line in fread:
if "#" in line:
continue
lnlst = line.split()
if len(lnlst) == 0:
if plotDecayed:
break
else:
continue
if not plotDecayed:
# Surface (newline[0] is the mass)
prevline = newline
newline = [float(x) for x in lnlst]
if newline[0] > 0.85:
break
if plotDecayed:
dens.append(float(lnlst[1]))
# Close file
fread.close()
# Calculate values of interest
if plotDecayed:
numDens.append(dens)
else:
numDens.append([(x + y)*0.5 for (x, y) in
zip(prevline[4:], newline[4:])])
# Calculate now the agb values and print the surface mass fractions per
# each isotope
print("# Surface number fraction values")
agbValues = []
for ii in range(len(numDens)):
dic = {}
dens = numDens[ii]
# Print the model name
print("# {}".format(sys.argv[ii + 1]))
# Add the values for each element
for jj in range(len(atomicNum)):
key = atomicNum[jj]
dic[key] = dic.get(key, 0) + dens[jj]*atomicMass[jj]
# Print the number fraction
print(dens[jj])
agbValues.append(dic)
print("")
# Now identify iron:
ironNumber = namesZ["fe"]
# Now divide every element by iron
for dens in agbValues:
ironDens = dens[ironNumber]
for key in dens:
dens[key] /= ironDens
# Solar as well
ironDens = solarValues[ironNumber]
for key in solarValues:
solarValues[key] /= ironDens
# Now create the final values
finalValues = []
zList = [x for x in solarValues.keys()]
zList.sort()
for dens in agbValues:
thisDens = []
for key in zList:
if key < lowZ:
continue
val = math.log10(dens[key]/solarValues[key])
thisDens.append(val)
finalValues.append(thisDens)
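    # Here [X/Fe] = log10((N_X/N_Fe)_model / (N_X/N_Fe)_solar): 0 means a solar
    # ratio, +1 means a tenfold enhancement of element X relative to iron.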
# Create xaxis:
xx = [x for x in zList if x >= lowZ]
# Print final values
print("# [X/Fe] values")
for ii in range(len(sys.argv[1:])):
print("# {}".format(sys.argv[ii + 1]))
print("")
for jj in range(len(xx)):
print(xx[jj], finalValues[ii][jj])
print("")
# From zList create contIndx. This list contains a number of
# tuples with the first and last index of any contiguous sequence
indx = 1; first = 0
prevKey = None; contIndx = []
for key in xx:
if prevKey is None:
prevKey = key
continue
# Check if keys are contiguous
if key - prevKey > 1:
contIndx.append((first, indx))
first = indx
prevKey = key
indx += 1
# Add last tuple
contIndx.append((first, indx + 1))
# Begin plot
figure = plt.figure()
plt.xlabel("Atomic number Z", size = 14)
plt.ylabel("[X/Fe]", size = 14)
# Plot values
if labs is None:
labs = sys.argv[1:]
ii = 0
for dens in finalValues:
# Plot first range
first, last = contIndx[0]
if lstyles is None:
lin, = plt.plot(xx[first:last], dens[first:last],
label = labs[ii], lw = 2)
else:
lin, = plt.plot(xx[first:last], dens[first:last], lstyles[ii],
label = labs[ii], lw = 2)
# Get color and line style
col, lst = lin.get_color(), lin.get_linestyle()
colStyle = col + lst
for elem in contIndx[1:]:
first, last = elem
plt.plot(xx[first:last], dens[first:last], colStyle, lw = 2)
ii += 1
# Set floating text
namAtm = {"Co":27, "Ge":32, "Se":34, "Kr":36, "Sr":38, "Zr":40,
"Mo":42, "Pd":46, "Cd":48, "Sn":50, "Te":52, "Ba":56,
"Ce":58, "Nd":60, "Sm":62, "Gd":64, "Dy":66, "Er":68,
"Yb":70, "Hf":72, "W":74, "Os":76, "Hg":80, "Pb":82,
"Rb":37, "Cs":55}
rNamAtm = ["Rb", "Cs"]
for name in namAtm:
yVal = 0
for ii in range(len(xx)):
if xx[ii] == namAtm[name]:
yVal = finalValues[-1][ii]
break
plt.text(namAtm[name] - 0.5, yVal*1.01, name, size = 14)
if name in rNamAtm:
plt.plot(namAtm[name], yVal, "ro")
else:
plt.plot(namAtm[name], yVal, "ko")
plt.legend(loc=0, ncol = 2)
plt.text(30, 1.1, "3M$_\odot$", fontsize = 16)
plt.show()
if __name__ == "__main__":
main()<|fim▁end|> | with open(species, "r") as fread:
for line in fread: |
<|file_name|>PostProcessing.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <string>
#include <stdio.h>
#include <stdlib.h>
#include <fstream>
#include <sstream>
#include <sys/stat.h>
#include "Constants.h"
#include "Converting.cpp"
#include "ProcessVideoRecording.cpp"
#include "ProcessScreenRecording.cpp"
using namespace std;
static void show_usage(string name)
{
cerr << "Usage: " << name << " Options:\n"
<< "\t--input <filepath> \t The filepath to the folder where the video files and settings are located\n"
<< "\t--output <foldername> \t The folder name for the output (will be created inside the input folder) \n"
<< "\t--eyealg <algorithm>\t The algorithm that is used for detecting the eye center. Possible Values: grad, isoph, comb \n"
<< "\t--gazealg <algorithm>\t The algorithm that is used for detecting the gaze. Possible Values: approx, geo \n"
<< "\t--fastwidth \t The window size of the scaled window that is used for detecting the eye center. (optional) Default: 50 for grad, 80 for isoph algo\n"
<< "\t--convertfps \t Indicates that we want to convert FPS (optional) Default: off\n"
<< "\t--drawonvideo \t Indicates that we want to draw the gaze points on the recorded video (optional) Default: off\n"
<< "\t--drawonscreen \t Indicates that we want to draw the gaze points on the recorded screen (optional but requires --convertfps) Default: off\n"
<< endl;
}
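// Example invocation (paths and folder names are hypothetical):
//   PostProcessing --input C:/sessions/run1/ --output postproc --eyealg grad
//                  --gazealg approx --convertfps --drawonvideo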
static bool existsFile (const string& name) {
ifstream f(name.c_str());
if (f.good()) {
f.close();
return true;
} else {
f.close();
return false;
}
}
int main (int argc, char *argv[]){
// Check if we have at least 7 arguments
if (argc < 7){
show_usage(argv[0]);
return 1;
}
    // Parse the arguments themselves
string folderFilePath;
string outputFolderName;
string eyeCenterDetectAlgo;
string gazeTrackingAlgo;
bool convertFPS = false;
bool drawOnVideo = false;
bool drawOnScreen = false;
int fastEyeWidth = 0;
for (int i = 1; i < argc; ++i) {
// **** required parameters *****
//Check the video input option
if (string(argv[i]) == "--input") {
if (i + 1 < argc) {
folderFilePath = argv[++i];
} else {
cerr << "--input option requires one argument." << endl;
show_usage(argv[0]);
return 1;
}
}
//Check the output video option
if (string(argv[i]) == "--output") {
if (i + 1 < argc) {
outputFolderName = argv[++i];
outputFolderName += "/";
} else {
cerr << "--output option requires one argument." << endl;
show_usage(argv[0]);
return 1;
}
}
//Check the eye algorithm option
if (string(argv[i]) == "--eyealg") {
if (i + 1 < argc) {
eyeCenterDetectAlgo = argv[++i];
} else {
cerr << "--eyealg option requires one argument." << endl;
show_usage(argv[0]);
return 1;
}
}
//Check the gaze algorithm option
if (string(argv[i]) == "--gazealg") {<|fim▁hole|> } else {
cerr << "--gazealg option requires one argument." << endl;
show_usage(argv[0]);
return 1;
}
}
// **** optional parameters *****
//Check the convertFPS option
if (string(argv[i]) == "--convertfps") {
convertFPS = true;
}
//Check the drawOnVideo option
if (string(argv[i]) == "--drawonvideo") {
drawOnVideo = true;
}
//Check the drawOnScreen option
if (string(argv[i]) == "--drawonscreen") {
drawOnScreen = true;
}
//Check the fastSizeWidth Argument
if (string(argv[i]) == "--fastwidth") {
if (i + 1 < argc) { //it must be one available
istringstream ss(argv[++i]);
if(!(ss >> fastEyeWidth)){ //it must be a valid number
cerr << "Invalid Number for argument --fastwidth" << endl;
show_usage(argv[0]);
return 1;
}
} else {
cerr << "--fastwidth option requires one argument." << endl;
show_usage(argv[0]);
return 1;
}
}
}
// **** check all argument requirements ****
// Check if the required files are available within the folder
if(!existsFile(folderFilePath+SETTINGS_FILE)){
cerr << "Missing file in folder: GazeTrackingSettings.txt" << endl;
return 1;
}
if(!existsFile(folderFilePath+RAW_INPUT_VIDEO)){
cerr << "Missing file in folder: video_recording_raw_vfr.mp4" << endl;
return 1;
}
if(outputFolderName.empty()){
cerr << "Invalid output folder. Either missing or empty/invalid string!" << endl;
return 1;
}
if(drawOnScreen){ //must only be available if we want to draw on the screen
if(!existsFile(folderFilePath+RAW_INPUT_SCREEN)){
cerr << "Missing file in folder: screen_recording_raw_vfr.txt" << endl;
return 1;
}
}
// Check if the eye center algorithm was specified correctly
if(eyeCenterDetectAlgo == "grad"){
eyeCenterDetectAlgo = "EYE_CENTER_ALGO_GRADIENTS";
//Set the default fast size if no fast size was specified
if(fastEyeWidth <= 0){
fastEyeWidth = 50;
}
} else if (eyeCenterDetectAlgo == "isoph"){
eyeCenterDetectAlgo = "EYE_CENTER_ALGO_ISOPHOTES";
if(fastEyeWidth <= 0){
fastEyeWidth = 80;
}
} else if (eyeCenterDetectAlgo == "comb"){
eyeCenterDetectAlgo = "EYE_CENTER_ALGO_COMBINED";
fastEyeWidth = 0; //we do not need this setting as we cannot specify it for the combined version (always uses 50/80)
} else {
cerr << "Invalid Eye Center Algorithm" << endl;
show_usage(argv[0]);
return 1;
}
// Check if the gaze tracking algorithm was specified correctly
if(gazeTrackingAlgo == "approx"){
gazeTrackingAlgo = "GAZE_TRACKING_ALGO_APPROX";
} else if (gazeTrackingAlgo == "geo"){
gazeTrackingAlgo = "GAZE_TRACKING_ALGO_GEO";
} else {
cerr << "Invalid Gaze Tracking Algorithm" << endl;
show_usage(argv[0]);
return 1;
}
// Create the post proc folder
mkdir((folderFilePath+outputFolderName).c_str(),0777);
//Depending on the availability start the corresponding steps
// Step1 => Check if we have to do a video conversion
if(convertFPS){
convert_fps(folderFilePath, outputFolderName, RAW_INPUT_VIDEO, RAW_INPUT_VIDEO_CFR);
if(drawOnScreen){
convert_fps(folderFilePath, outputFolderName, RAW_INPUT_SCREEN, RAW_INPUT_SCREEN_CFR);
}
}
// Step2 => Convert the video file (consider the different type of input files)
if(convertFPS){
string postProcFolder = folderFilePath+outputFolderName;
process_video(postProcFolder+RAW_INPUT_VIDEO_CFR, postProcFolder+PROC_VIDEO_RECORDING_CFR, folderFilePath+SETTINGS_FILE, postProcFolder+TEXT_GAZE_POINTS_CFR, drawOnVideo, eyeCenterDetectAlgo,gazeTrackingAlgo, fastEyeWidth);
} else {
string postProcFolder = folderFilePath+outputFolderName;
process_video(folderFilePath+RAW_INPUT_VIDEO, postProcFolder+PROC_VIDEO_RECORDING_VFR, folderFilePath+SETTINGS_FILE, postProcFolder+TEXT_GAZE_POINTS_VFR, drawOnVideo, eyeCenterDetectAlgo,gazeTrackingAlgo, fastEyeWidth);
}
// Step3 => Draw GazePoints on the Screen Recording file
if(drawOnScreen && convertFPS){
string postProcFolder = folderFilePath+outputFolderName;
process_screen(postProcFolder+RAW_INPUT_SCREEN_CFR, postProcFolder+PROC_SCREEN_RECORDING_CFR, postProcFolder+TEXT_GAZE_POINTS_CFR);
}
return 0;
}<|fim▁end|> | if (i + 1 < argc) {
gazeTrackingAlgo = argv[++i]; |
<|file_name|>RenameExampleClassPlugin.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2006-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.solmix.generator.plugin;
import static org.solmix.commons.util.StringUtils.stringHasValue;
import static org.solmix.generator.util.Messages.getString;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.solmix.generator.api.IntrospectedTable;
import org.solmix.generator.api.PluginAdapter;
/**
* This plugin demonstrates overriding the initialized() method to rename the
* generated example classes. Instead of xxxExample, the classes will be named
* xxxCriteria.
*
* <p>This plugin accepts two properties:
*
* <ul>
* <li><tt>searchString</tt> (required) the regular expression of the name
* search.</li>
* <li><tt>replaceString</tt> (required) the replacement String.</li>
* </ul>
*
* <p>For example, to change the name of the generated Example classes from
* xxxExample to xxxCriteria, specify the following:
*
* <dl><|fim▁hole|> * <dd>Criteria</dd>
* </dl>
*
*
* @author Jeff Butler
*
*/
public class RenameExampleClassPlugin extends PluginAdapter {
private String searchString;
private String replaceString;
private Pattern pattern;
public RenameExampleClassPlugin() {
}
@Override
public boolean validate(List<String> warnings) {
searchString = properties.getProperty("searchString");
replaceString = properties.getProperty("replaceString");
boolean valid = stringHasValue(searchString)
&& stringHasValue(replaceString);
if (valid) {
pattern = Pattern.compile(searchString);
} else {
if (!stringHasValue(searchString)) {
warnings.add(getString("ValidationError.18",
"RenameExampleClassPlugin",
"searchString"));
}
if (!stringHasValue(replaceString)) {
warnings.add(getString("ValidationError.18",
"RenameExampleClassPlugin",
"replaceString"));
}
}
return valid;
}
@Override
public void initialized(IntrospectedTable introspectedTable) {
String oldType = introspectedTable.getExampleType();
Matcher matcher = pattern.matcher(oldType);
oldType = matcher.replaceAll(replaceString);
introspectedTable.setExampleType(oldType);
}
}<|fim▁end|> | * <dt>searchString</dt>
* <dd>Example$</dd>
* <dt>replaceString</dt> |
<|file_name|>analyzeMissingDates.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
var http = require('http');
var moment = require('moment');
var server = undefined;
var threshold = undefined;
var units = undefined;
if (!isValid(process.argv)) {
    console.error('invalid arguments, expected hostname, threshold and units');
process.exit(-1);
}
var request = http.request('http://' + server + '/api/temperatures', function(response) {
var statusCode = response.statusCode;<|fim▁hole|> var json = undefined;
if (statusCode === 200) {
response.on('data', function(chunk) {
result.push(chunk.toString());
});
response.on('end', function() {
json = JSON.parse(result.join(''));
analyze(json);
});
}
});
request.end();
function analyze(data) {
var length = data.length;
var i, sensorData, sensorId, sensorLog, dates;
var analysis;
for (i = 0; i < length; i++) {
sensorData = data[i];
sensorId = sensorData['_id'];
sensorLog = sensorData['value'];
dates = sensorLog.map(function(log) {
return moment(log.date);
});
dates.sort(function(a, b) {
return (a < b ? -1 : (a > b ? 1 : 0));
});
analysis = dates.reduce(function(analysis, to) {
var from = analysis.previous;
var diff;
if (analysis.previous) {
diff = to.diff(from, units);
if (diff > threshold) {
analysis.result.push({
diff: diff + ' ' + units,
from: from.format('YYMMDDHHmm'),
to: to.format('YYMMDDHHmm')
});
}
}
return {
previous: to,
result: analysis.result
};
}, { result: [] });
console.log(sensorId, analysis.result);
}
}
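// Example (hypothetical host): `node analyzeMissingDates.js sensors.example.org 2 hours`
// logs, per sensor, every gap between consecutive readings longer than 2 hours,
// e.g. [ { diff: '5 hours', from: '1701150800', to: '1701151300' } ]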
function isValid(args) {
if (args.length === 5) {
server = args[2];
threshold = parseInt(args[3], 10);
units = args[4];
return true;
}
else {
return false;
}
}<|fim▁end|> | var result = []; |
<|file_name|>geo-util.js<|end_file_name|><|fim▁begin|>define([
'gmaps',
'config',
'leaflet',
'leaflet-pip',
'app-state'
], function (gmaps, config, L, leafletPip, appState) {
"use strict";<|fim▁hole|> var
geocoder = new gmaps.Geocoder(),
// simplifies place name for geocoder to get better results
locationAddress = function(place_name, district_name) {
district_name = district_name || config.defaultDistrictName;
return place_name
                .replace(/\u00a0/gi, ' ') // normalize non-breaking spaces
.split('x ', 1)[0]
.split('(', 1)[0]
.split(' - ', 1)[0]
.split(' – ', 1)[0] // EN DASH character
.replace('křižovatka ', '')
.replace('ul. ', '')
.trim() + ', ' + district_name;
},
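        // Illustrative call (hypothetical input):
        //   locationAddress('Vodičkova x Jungmannova', 'Praha 1')
        //   => 'Vodičkova, Praha 1'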
// geocodes location
// place_name - string specifying location
// district - model with district
// cb - callback function which is called when location is determined;
// called with one parameter - array [lat, lng]
geoLocate = function(place_name, district, cb) {
var
district_name,
map_center;
if (district) {
district_name = district.get('properties').district_name;
map_center = district.getCenter();
} else {
map_center = config.mapCenter;
}
geocoder.geocode({'address': locationAddress(place_name, district_name)}, function(data, status) {
if (status == gmaps.GeocoderStatus.OK &&
data[0].geometry.location_type != gmaps.GeocoderLocationType.APPROXIMATE &&
(!('partial_match' in data[0]) || data[0].partial_match !== true)) {
cb([data[0].geometry.location.lat(), data[0].geometry.location.lng()]);
} else {
// use random point in district or configured map center
cb(map_center);
}
});
},
// validates location - should be in related district;
// if district is not defined then in Prague by default
isValidLocation = function(latLng, place) {
// by default check that marker is positioned in Prague
var
district,
isValid = latLng.lat < config.borders.maxLat &&
latLng.lat > config.borders.minLat &&
latLng.lng < config.borders.maxLng &&
latLng.lng > config.borders.minLng
;
if (place.get('district_id')) {
district = appState.districts.get(place.get('district_id'));
if (district) {
// district model already in the collection
if (district.has('geometry')) {
// pointInLayer returns array of matched layers; empty array if nothing was matched
isValid = (leafletPip.pointInLayer(latLng, L.geoJson(district.get('geometry')), true).length > 0);
}
}
}
return isValid;
}
;
return {
geoLocate: geoLocate,
locationAddress: locationAddress,
isValidLocation: isValidLocation
};
});<|fim▁end|> | |
<|file_name|>HedgeFunction.cpp<|end_file_name|><|fim▁begin|>/*
fuzzylite (R), a fuzzy logic control library in C++.
Copyright (C) 2010-2017 FuzzyLite Limited. All rights reserved.
Author: Juan Rada-Vilela, Ph.D. <[email protected]>
This file is part of fuzzylite.
fuzzylite is free software: you can redistribute it and/or modify it under
the terms of the FuzzyLite License included with the software.
You should have received a copy of the FuzzyLite License along with
fuzzylite. If not, see <http://www.fuzzylite.com/license/>.
fuzzylite is a registered trademark of FuzzyLite Limited.
*/
#include "fl/hedge/HedgeFunction.h"
namespace fl {
HedgeFunction::HedgeFunction(const std::string& formula) : Hedge() {
_function.variables["x"] = fl::nan;
if (not formula.empty()) {
_function.load(formula);
}
}
std::string HedgeFunction::name() const {
return "HedgeFunction";
}
Complexity HedgeFunction::complexity() const {
if (_function.root())
return _function.complexity().function(2 * std::log(scalar(_function.variables.size())));
return _function.complexity();
}
scalar HedgeFunction::hedge(scalar x) const {
_function.variables["x"] = x;<|fim▁hole|>
Function& HedgeFunction::function() {
return this->_function;
}
void HedgeFunction::setFormula(const std::string& formula) {
_function.load(formula);
}
std::string HedgeFunction::getFormula() const {
return _function.getFormula();
}
HedgeFunction* HedgeFunction::clone() const {
return new HedgeFunction(*this);
}
Hedge* HedgeFunction::constructor() {
return new HedgeFunction;
}
}<|fim▁end|> | return _function.membership(x);
} |
<|file_name|>Sound.cpp<|end_file_name|><|fim▁begin|>#include "Sound.h"
#include <Windows.h>
#include "DigitalGraffiti.h"
Sound::Sound(void)
<|fim▁hole|>{
// Find music and sound files
std::string exeDir = DigitalGraffiti::getExeDirectory();
DigitalGraffiti::getFileList(exeDir + "\\sound\\instructions\\*", instructionsMusicList);
DigitalGraffiti::getFileList(exeDir + "\\sound\\cleanup\\*", cleanupMusicList);
DigitalGraffiti::getFileList(exeDir + "\\sound\\splat\\*", splatSoundList);
instructionsCounter = 0;
cleanupCounter = 0;
splatCounter = 0;
numInstructions= instructionsMusicList.size();
numCleanup = cleanupMusicList.size();
numSplat = splatSoundList.size();
if(DigitalGraffiti::DEBUG)
{
printf("Sound directory is: %s\n", exeDir.c_str());
printf("\tnumInstructions = %u\n", numInstructions);
printf("\tnumCleanup = %u\n", numCleanup);
printf("\tnumSplat = %u\n", numSplat);
}
}
void Sound::playInstructionsMusic(void)
{
if(numInstructions > 0)
{
if(DigitalGraffiti::DEBUG)
{
printf("Play %s\n", instructionsMusicList[instructionsCounter].c_str());
}
PlaySound(TEXT(instructionsMusicList[instructionsCounter].c_str()), NULL, SND_FILENAME | SND_ASYNC| SND_NOWAIT);
instructionsCounter = (instructionsCounter + 1) % numInstructions;
}
}
void Sound::playCleanupMusic(void)
{
if(numCleanup > 0)
{
if(DigitalGraffiti::DEBUG)
{
printf("Play %s\n", cleanupMusicList[cleanupCounter].c_str());
}
PlaySound(TEXT(cleanupMusicList[cleanupCounter].c_str()), NULL, SND_FILENAME | SND_ASYNC| SND_NOWAIT);
cleanupCounter = (cleanupCounter + 1) % numCleanup;
}
}
void Sound::playSplatSound(void)
{
if(numSplat > 0)
{
if(DigitalGraffiti::DEBUG)
{
printf("Play %s\n", splatSoundList[splatCounter].c_str());
}
PlaySound(TEXT(splatSoundList[splatCounter].c_str()), NULL, SND_FILENAME | SND_ASYNC| SND_NOWAIT);
splatCounter = (splatCounter + 1) % numSplat;
}
}<|fim▁end|> | |
<|file_name|>user.go<|end_file_name|><|fim▁begin|>package handler
import (
"strconv"
"github.com/dinever/golf"
"github.com/dingoblog/dingo/app/model"
)
func registerUserHandlers(app *golf.Application, routes map[string]map[string]interface{}) {
app.Get("/api/users", APIUsersHandler)
routes["GET"]["users_url"] = "/api/users"
app.Get("/api/users/:user_id", APIUserHandler)
routes["GET"]["user_url"] = "/api/users/:user_id"
<|fim▁hole|> app.Get("/api/users/slug/:slug", APIUserSlugHandler)
routes["GET"]["user_slug_url"] = "/api/users/slug/:slug"
app.Get("/api/users/email/:email", APIUserEmailHandler)
routes["GET"]["user_email_url"] = "/api/users/email/:email"
}
// APIUserHandler retrieves the user with the given id.
func APIUserHandler(ctx *golf.Context) {
id, err := strconv.Atoi(ctx.Param("user_id"))
if err != nil {
handleErr(ctx, 500, err)
return
}
user := &model.User{Id: int64(id)}
err = user.GetUserById()
if err != nil {
handleErr(ctx, 404, err)
return
}
ctx.JSONIndent(user, "", " ")
}
// APIUserSlugHandler retrieves the user with the given slug.
func APIUserSlugHandler(ctx *golf.Context) {
slug := ctx.Param("slug")
user := &model.User{Slug: slug}
err := user.GetUserBySlug()
if err != nil {
handleErr(ctx, 404, err)
return
}
ctx.JSONIndent(user, "", " ")
}
// APIUserEmailHandler retrieves the user with the given email.
func APIUserEmailHandler(ctx *golf.Context) {
email := ctx.Param("email")
user := &model.User{Email: email}
err := user.GetUserByEmail()
if err != nil {
handleErr(ctx, 404, err)
return
}
ctx.JSONIndent(user, "", " ")
}
// APIUsersHandler retrieves all users.
func APIUsersHandler(ctx *golf.Context) {
ctx.JSONIndent(map[string]interface{}{
"message": "Not implemented",
}, "", " ")
}<|fim▁end|> | |
<|file_name|>StorageService.java<|end_file_name|><|fim▁begin|>package com.nowgroup.scspro.service.cat;
import com.nowgroup.scspro.dto.cat.Storage;
import com.nowgroup.scspro.dto.geo.State;
import com.nowgroup.scspro.service.BaseService;<|fim▁hole|> State getStateInStorage(int id);
}<|fim▁end|> |
public interface StorageService extends BaseService<Storage> { |
<|file_name|>security_groups.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import attr
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.widget import View, Select
from widgetastic_manageiq import (
Accordion, BaseEntitiesView, BootstrapSelect, BreadCrumb, ItemsToolBarViewSelector,
ManageIQTree, SummaryTable, Text, TextInput)
from widgetastic_patternfly import Dropdown, Button
from cfme.base.ui import BaseLoggedInPage
from cfme.exceptions import ItemNotFound, SecurityGroupsNotFound
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import navigate_to, navigator, CFMENavigateStep
from cfme.utils.blockers import BZ
from cfme.utils.wait import wait_for
class SecurityGroupToolbar(View):
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
download = Dropdown('Download')
view_selector = View.nested(ItemsToolBarViewSelector)
class SecurityGroupDetailsToolbar(View):
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
download = Button(title='Download summary in PDF format')
class SecurityGroupDetailsAccordion(View):
@View.nested
class properties(Accordion): # noqa
tree = ManageIQTree()
@View.nested
class relationships(Accordion): # noqa
tree = ManageIQTree()
class SecurityGroupDetailsEntities(View):
breadcrumb = BreadCrumb()
title = Text('//div[@id="main-content"]//h1')
properties = SummaryTable(title='Properties')
relationships = SummaryTable(title='Relationships')
smart_management = SummaryTable(title='Smart Management')
firewall_rules = SummaryTable(title="Firewall Rules")
class SecurityGroupAddEntities(View):
breadcrumb = BreadCrumb()
title = Text('//div[@id="main-content"]//h1')
class SecurityGroupAddForm(View):
network_manager = BootstrapSelect(id='ems_id')
name = TextInput(name='name')
description = TextInput(name='description')
cloud_tenant = Select(name='cloud_tenant_id')
add = Button('Add')
cancel = Button('Cancel')
class SecurityGroupView(BaseLoggedInPage):
"""Base view for header and nav checking, navigatable views should inherit this"""
@property
def in_security_groups(self):
return(
self.logged_in_as_current_user and
self.navigation.currently_selected == ['Networks', 'Security Groups'])
class SecurityGroupAllView(SecurityGroupView):
@property
def is_displayed(self):
return (
self.in_security_groups and
self.entities.title.text == 'Security Groups')
toolbar = View.nested(SecurityGroupToolbar)
including_entities = View.include(BaseEntitiesView, use_parent=True)
class SecurityGroupDetailsView(SecurityGroupView):
@property
def is_displayed(self):
expected_title = '{} (Summary)'.format(self.context['object'].name)
return (
self.in_security_groups and
self.entities.title.text == expected_title and
self.entities.breadcrumb.active_location == expected_title)
toolbar = View.nested(SecurityGroupDetailsToolbar)
sidebar = View.nested(SecurityGroupDetailsAccordion)
entities = View.nested(SecurityGroupDetailsEntities)
class SecurityGroupAddView(SecurityGroupView):
@property
def is_displayed(self):
return (
self.in_security_groups and
self.entities.breadcrumb.active_location == 'Add New Security Group' and
self.entities.title.text == 'Add New Security Group')
entities = View.nested(SecurityGroupAddEntities)
form = View.nested(SecurityGroupAddForm)
@attr.s
class SecurityGroup(BaseEntity):
""" Automate Model page of SecurityGroup
Args:
provider (obj): Provider name for Network Manager
name(str): name of the Security Group
description (str): Security Group description
"""
_param_name = "SecurityGroup"
name = attr.ib()
provider = attr.ib()
description = attr.ib(default="")
def refresh(self):
self.provider.refresh_provider_relationships()
self.browser.refresh()
def delete(self, cancel=False, wait=False):
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Delete this Security Group',
handle_alert=(not cancel))
# cancel doesn't redirect, confirmation does
view.flush_widget_cache()
if not cancel:
view = self.create_view(SecurityGroupAllView)
view.is_displayed
view.flash.assert_success_message('Delete initiated for 1 Security Group.')
if wait:
wait_for(
lambda: self.name in view.entities.all_entity_names,
message="Wait Security Group to disappear",
fail_condition=True,
num_sec=500,
timeout=1000,
delay=20,
fail_func=self.refresh
)
@property
def exists(self):
try:
navigate_to(self, 'Details')
except SecurityGroupsNotFound:
return False
else:
return True
@attr.s
class SecurityGroupCollection(BaseCollection):
""" Collection object for the :py:class: `cfme.cloud.SecurityGroup`. """
ENTITY = SecurityGroup
def create(self, name, description, provider, cancel=False, wait=False):
"""Create new Security Group.
Args:
provider (obj): Provider name for Network Manager
name (str): name of the Security Group
description (str): Security Group description
cancel (boolean): Cancel Security Group creation
wait (boolean): wait if Security Group created
"""
view = navigate_to(self, 'Add')
changed = view.form.fill({'network_manager': "{} Network Manager".format(provider.name),
'name': name,
'description': description,
'cloud_tenant': 'admin'})
if cancel and changed:
view.form.cancel.click()
flash_message = 'Add of new Security Group was cancelled by the user'
else:
view.form.add.click()
flash_message = 'Security Group "{}" created'.format(name)
# add/cancel should redirect, new view
view = self.create_view(SecurityGroupAllView)
view.flash.assert_success_message(flash_message)
view.entities.paginator.set_items_per_page(500)
sec_groups = self.instantiate(name, provider, description)
if wait:
wait_for(
lambda: sec_groups.name in view.entities.all_entity_names,<|fim▁hole|> fail_func=sec_groups.refresh,
handle_exception=True
)
return sec_groups
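    # Illustrative usage from a test (names are hypothetical):
    #   sg = collection.create('test-sg', 'created by test', provider, wait=True)
    #   assert sg.exists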
# TODO: Delete collection as Delete option is not available on List view and update
@navigator.register(SecurityGroupCollection, 'All')
class SecurityGroupAll(CFMENavigateStep):
VIEW = SecurityGroupAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self, *args, **kwargs):
self.prerequisite_view.navigation.select('Networks', 'Security Groups')
@navigator.register(SecurityGroup, 'Details')
class Details(CFMENavigateStep):
VIEW = SecurityGroupDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
try:
self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True).click()
except ItemNotFound:
raise SecurityGroupsNotFound("Security Groups {} not found".format(
self.obj.name))
@navigator.register(SecurityGroupCollection, 'Add')
class Add(CFMENavigateStep):
VIEW = SecurityGroupAddView
prerequisite = NavigateToSibling("All")
def step(self, *args, **kwargs):
"""Raises DropdownItemDisabled from widgetastic_patternfly
if no RHOS Network manager present"""
# Todo remove when fixed 1520669
if (BZ(1520669, forced_streams='5.9').blocks and
self.prerequisite_view.flash.messages):
self.prerequisite_view.flash.dismiss()
self.prerequisite_view.toolbar.configuration.item_select('Add a new Security Group')<|fim▁end|> | message="Wait Security Group to appear",
num_sec=400,
timeout=1000,
delay=20, |
<|file_name|>motionSmoother.H<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright held by original author
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Class
Foam::motionSmoother
Description
Given a displacement moves the mesh by scaling the displacement back
until there are no more mesh errors.
Holds displacement field (read upon construction since need boundary
conditions) and scaling factor and optional patch number on which to
scale back displacement.
E.g.
@verbatim
// Construct iterative mesh mover.
motionSmoother meshMover(mesh, labelList(1, patchI));
// Set desired displacement:
meshMover.displacement() = ..
for (label iter = 0; iter < maxIter; iter++)
{
if (meshMover.scaleMesh(true))
{
Info<< "Successfully moved mesh" << endl;
return true;
}
}
@endverbatim
Note
- Shared points (parallel): a processor can have points which are part of
pp on another processor but have no pp itself (i.e. it has points
and/or edges but no faces of pp). Hence we have to be careful when e.g.
synchronising displacements that the value from the processor which has
faces of pp get priority. This is currently handled in setDisplacement
by resetting the internal displacement to zero before doing anything
else. The combine operator used will give preference to non-zero
values.
- Various routines take baffles. These are sets of boundary faces that
are treated as a single internal face. This is a hack used to apply
movement to internal faces.
- Mesh constraints are looked up from the supplied dictionary. (uses
recursive lookup)
SourceFiles
motionSmoother.C
motionSmootherTemplates.C
\*---------------------------------------------------------------------------*/
#ifndef motionSmoother_H
#define motionSmoother_H
#include "pointFields.H"
#include "HashSet.H"
#include "PackedBoolList.H"
#include "indirectPrimitivePatch.H"
#include "className.H"
#include "twoDPointCorrector.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
class polyMeshGeometry;
class faceSet;
/*---------------------------------------------------------------------------*\
Class motionSmoother Declaration
\*---------------------------------------------------------------------------*/
class motionSmoother
{
// Private class
//- To synchronise displacements. We want max displacement since
// this is what is specified on pp and internal mesh will have
// zero displacement.
class maxMagEqOp
{
public:
void operator()(vector& x, const vector& y) const
{
for (direction i = 0; i < vector::nComponents; i++)
{
scalar magX = mag(x[i]);
scalar magY = mag(y[i]);
if (magX < magY)
{
x[i] = y[i];
}
else if (magX == magY)
{
if (y[i] > x[i])
{
x[i] = y[i];
}
}
}
}
};
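        // Illustrative combine (values are hypothetical): x = (0 0 0) merged
        // with y = (1e-3 -2e-3 0) yields x = (1e-3 -2e-3 0), so the non-zero
        // patch displacement wins over the zero internal value.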
// Private data
//- Reference to polyMesh. Non-const since we move mesh.
polyMesh& mesh_;
//- Reference to pointMesh
pointMesh& pMesh_;
//- Reference to face subset of all adaptPatchIDs
indirectPrimitivePatch& pp_;
//- Indices of fixedValue patches that we're allowed to modify the
// displacement on.
const labelList adaptPatchIDs_;
// Smoothing and checking parameters
dictionary paramDict_;
// Internal data
//- Displacement field
pointVectorField displacement_;
//- Scale factor for displacement
pointScalarField scale_;
//- Starting mesh position
pointField oldPoints_;
//- Is mesh point on boundary or not
PackedBoolList isInternalPoint_;
//- Is edge master (always except if on coupled boundary and on
// lower processor)
PackedBoolList isMasterEdge_;
//- 2-D motion corrector
twoDPointCorrector twoDCorrector_;
// Muli-patch constraints (from pointPatchInterpolation)
labelList patchPatchPointConstraintPoints_;
tensorField patchPatchPointConstraintTensors_;
// Private Member Functions
//- Average of connected points.
template <class Type>
tmp<GeometricField<Type, pointPatchField, pointMesh> > avg
(
const GeometricField<Type, pointPatchField, pointMesh>& fld,
const scalarField& edgeWeight,
const bool separation
) const;
//- Check constraints
template<class Type>
static void checkConstraints
(
GeometricField<Type, pointPatchField, pointMesh>&
);
//- Multi-patch constraints
template<class Type>
void applyCornerConstraints
(
GeometricField<Type, pointPatchField, pointMesh>&
) const;
//- Test synchronisation of pointField
template<class Type, class CombineOp>
void testSyncField
(
const Field<Type>&,
const CombineOp& cop,
const Type& zero,
const bool separation,
const scalar maxMag
) const;
//- Assemble tensors for multi-patch constraints
void makePatchPatchAddressing();
static void checkFld(const pointScalarField&);
//- Get points used by given faces
labelHashSet getPoints(const labelHashSet&) const;
//- explicit smoothing and min on all affected internal points
void minSmooth
(
const PackedBoolList& isAffectedPoint,
const pointScalarField& fld,
pointScalarField& newFld
) const;
//- same but only on selected points (usually patch points)
void minSmooth
(
const PackedBoolList& isAffectedPoint,
const labelList& meshPoints,
const pointScalarField& fld,
pointScalarField& newFld
) const;
//- Scale certain (internal) points of a field
void scaleField
(
const labelHashSet& pointLabels,
const scalar scale,
pointScalarField&
) const;
//- As above but points have to be in meshPoints as well
// (usually to scale patch points)
void scaleField
(
const labelList& meshPoints,
const labelHashSet& pointLabels,
const scalar scale,
pointScalarField&
) const;
//- Helper function. Is point internal?
bool isInternalPoint(const label pointI) const;
//- Given a set of faces that cause smoothing and a number of
// iterations determine the maximum set of points who are affected
// and the accordingly affected faces.
void getAffectedFacesAndPoints
(
const label nPointIter,
const faceSet& wrongFaces,
labelList& affectedFaces,
PackedBoolList& isAffectedPoint
) const;
//- Disallow default bitwise copy construct
motionSmoother(const motionSmoother&);
//- Disallow default bitwise assignment
void operator=(const motionSmoother&);
public:
<|fim▁hole|> //- Construct from mesh, patches to work on and smoothing parameters.
// Reads displacement field (only boundary conditions used)
motionSmoother
(
polyMesh&,
pointMesh&,
indirectPrimitivePatch& pp, // 'outside' points
const labelList& adaptPatchIDs, // patches forming 'outside'
const dictionary& paramDict
);
//- Construct from mesh, patches to work on and smoothing parameters
// and displacement field (only boundary conditions used)
motionSmoother
(
polyMesh&,
indirectPrimitivePatch& pp, // 'outside' points
const labelList& adaptPatchIDs, // patches forming 'outside'
const pointVectorField&,
const dictionary& paramDict
);
// Destructor
~motionSmoother();
// Member Functions
// Access
//- Reference to mesh
const polyMesh& mesh() const;
//- Reference to pointMesh
const pointMesh& pMesh() const;
//- Reference to patch
const indirectPrimitivePatch& patch() const;
//- Patch labels that are being adapted
const labelList& adaptPatchIDs() const;
const dictionary& paramDict() const;
//- Reference to displacement field
pointVectorField& displacement();
//- Reference to displacement field
const pointVectorField& displacement() const;
//- Reference to scale field
const pointScalarField& scale() const;
//- Starting mesh position
const pointField& oldPoints() const;
//- Return reference to 2D point motion correction
twoDPointCorrector& twoDCorrector()
{
return twoDCorrector_;
}
// Edit
//- Take over existing mesh position.
void correct();
//- Set displacement field from displacement on patch points.
// Modify provided displacement to be consistent with actual
// boundary conditions on displacement. Note: resets the
// displacement to be 0 on coupled patches beforehand
// to make sure shared points
// partially on pp (on some processors) and partially not
// (on other processors) get the value from pp.
void setDisplacement(pointField& patchDisp);
//- Special correctBoundaryConditions which evaluates fixedValue
// patches first so they get overwritten with any constraint
// bc's.
void correctBoundaryConditions(pointVectorField&) const;
//- Move mesh. Does 2D correction (modifies passed pointField) and
// polyMesh::movePoints. Returns swept volumes.
tmp<scalarField> movePoints(pointField&);
//- Set the errorReduction (by how much to scale the displacement
// at error locations) parameter. Returns the old value.
// Set to 0 (so revert to old mesh) grows out one cell layer
// from error faces.
scalar setErrorReduction(const scalar);
//- Move mesh with given scale. Return true if mesh ok or has
// less than nAllow errors, false
// otherwise and locally update scale. Smoothmesh=false means only
// patch points get moved.
// Parallel ok (as long as displacement field is consistent
// across patches)
bool scaleMesh
(
labelList& checkFaces,
const bool smoothMesh = true,
const label nAllow = 0
);
//- Move mesh (with baffles) with given scale.
bool scaleMesh
(
labelList& checkFaces,
const List<labelPair>& baffles,
const bool smoothMesh = true,
const label nAllow = 0
);
//- Move mesh with externally provided mesh constraints
bool scaleMesh
(
labelList& checkFaces,
const List<labelPair>& baffles,
const dictionary& paramDict,
const dictionary& meshQualityDict,
const bool smoothMesh = true,
const label nAllow = 0
);
//- Update topology
void updateMesh();
//- Check mesh with mesh settings in dict. Collects incorrect faces
// in set. Returns true if one or more faces in error.
// Parallel ok.
static bool checkMesh
(
const bool report,
const polyMesh& mesh,
const dictionary& dict,
labelHashSet& wrongFaces
);
//- Check (subset of mesh) with mesh settings in dict.
// Collects incorrect faces in set. Returns true if one
// or more faces in error. Parallel ok.
static bool checkMesh
(
const bool report,
const polyMesh& mesh,
const dictionary& dict,
const labelList& checkFaces,
labelHashSet& wrongFaces
);
//- Check (subset of mesh including baffles) with mesh settings
// in dict. Collects incorrect faces in set. Returns true if one
// or more faces in error. Parallel ok.
static bool checkMesh
(
const bool report,
const polyMesh& mesh,
const dictionary& dict,
const labelList& checkFaces,
const List<labelPair>& baffles,
labelHashSet& wrongFaces
);
//- Check part of mesh with mesh settings in dict.
// Collects incorrect faces in set. Returns true if one or
// more faces in error. Parallel ok.
static bool checkMesh
(
const bool report,
const dictionary& dict,
const polyMeshGeometry&,
const labelList& checkFaces,
labelHashSet& wrongFaces
);
//- Check part of mesh including baffles with mesh settings in dict.
// Collects incorrect faces in set. Returns true if one or
// more faces in error. Parallel ok.
static bool checkMesh
(
const bool report,
const dictionary& dict,
const polyMeshGeometry&,
const labelList& checkFaces,
const List<labelPair>& baffles,
labelHashSet& wrongFaces
);
// Helper functions to manipulate displacement vector.
//- Fully explicit smoothing of internal points with varying
// diffusivity.
template <class Type>
void smooth
(
const GeometricField<Type, pointPatchField, pointMesh>& fld,
const scalarField& edgeWeight,
const bool separation,
GeometricField<Type, pointPatchField, pointMesh>& newFld
) const;
};
template<>
void motionSmoother::applyCornerConstraints<scalar>
(
GeometricField<scalar, pointPatchField, pointMesh>& pf
) const;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#ifdef NoRepository
# include "motionSmootherTemplates.C"
#endif
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //<|fim▁end|> | ClassName("motionSmoother");
// Constructors
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{DocumentLoader, LoadType};
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::DocumentBinding::{
DocumentMethods, DocumentReadyState,
};
use crate::dom::bindings::codegen::Bindings::HTMLImageElementBinding::HTMLImageElementMethods;
use crate::dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::ServoParserBinding;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::settings_stack::is_execution_stack_empty;
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::characterdata::CharacterData;
use crate::dom::comment::Comment;
use crate::dom::document::{Document, DocumentSource, HasBrowsingContext, IsHTMLDocument};
use crate::dom::documenttype::DocumentType;
use crate::dom::element::{CustomElementCreationMode, Element, ElementCreator};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlformelement::{FormControlElementHelpers, HTMLFormElement};
use crate::dom::htmlimageelement::HTMLImageElement;
use crate::dom::htmlscriptelement::{HTMLScriptElement, ScriptResult};
use crate::dom::htmltemplateelement::HTMLTemplateElement;
use crate::dom::node::{Node, ShadowIncluding};
use crate::dom::performanceentry::PerformanceEntry;
use crate::dom::performancenavigationtiming::PerformanceNavigationTiming;
use crate::dom::processinginstruction::ProcessingInstruction;
use crate::dom::text::Text;
use crate::dom::virtualmethods::vtable_for;
use crate::network_listener::PreInvoke;
use crate::script_thread::ScriptThread;
use dom_struct::dom_struct;
use embedder_traits::resources::{self, Resource};
use encoding_rs::Encoding;
use html5ever::buffer_queue::BufferQueue;
use html5ever::tendril::fmt::UTF8;
use html5ever::tendril::{ByteTendril, StrTendril, TendrilSink};
use html5ever::tree_builder::{ElementFlags, NextParserState, NodeOrText, QuirksMode, TreeSink};
use html5ever::{Attribute, ExpandedName, LocalName, QualName};
use hyper_serde::Serde;
use mime::{self, Mime};
use msg::constellation_msg::PipelineId;
use net_traits::{FetchMetadata, FetchResponseListener, Metadata, NetworkError};
use net_traits::{ResourceFetchTiming, ResourceTimingType};
use profile_traits::time::{
profile, ProfilerCategory, TimerMetadata, TimerMetadataFrameType, TimerMetadataReflowType,
};
use script_traits::DocumentActivity;
use servo_config::pref;
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::cell::Cell;
use std::mem;
use style::context::QuirksMode as ServoQuirksMode;
use tendril::stream::LossyDecoder;
mod async_html;
mod html;
mod xml;
#[dom_struct]
/// The parser maintains two input streams: one for input from script through
/// document.write(), and one for input from network.
///
/// There is no concrete representation of the insertion point; instead it
/// always points to just before the next character from the network input,
/// with all of the script input before it.
///
/// ```text
/// ... script input ... | ... network input ...
///                      ^
///               insertion point
/// ```
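///
/// A hypothetical illustration (not taken from the spec or the original
/// comment): if the network has delivered `<p>he` so far and a script calls
/// `document.write("llo")`, the tokenizer consumes `llo` before the
/// remaining network bytes, because script input always sits before the
/// insertion point.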
pub struct ServoParser {
reflector: Reflector,
/// The document associated with this parser.
document: Dom<Document>,
/// The decoder used for the network input.
network_decoder: DomRefCell<Option<NetworkDecoder>>,
/// Input received from network.
#[ignore_malloc_size_of = "Defined in html5ever"]
network_input: DomRefCell<BufferQueue>,
/// Input received from script. Used only to support document.write().
#[ignore_malloc_size_of = "Defined in html5ever"]
script_input: DomRefCell<BufferQueue>,
/// The tokenizer of this parser.
tokenizer: DomRefCell<Tokenizer>,
/// Whether to expect any further input from the associated network request.
last_chunk_received: Cell<bool>,
/// Whether this parser should avoid passing any further data to the tokenizer.
suspended: Cell<bool>,
/// <https://html.spec.whatwg.org/multipage/#script-nesting-level>
script_nesting_level: Cell<usize>,
/// <https://html.spec.whatwg.org/multipage/#abort-a-parser>
aborted: Cell<bool>,
/// <https://html.spec.whatwg.org/multipage/#script-created-parser>
script_created_parser: bool,
}
#[derive(PartialEq)]
enum LastChunkState {
Received,
NotReceived,
}
pub struct ElementAttribute {
name: QualName,
value: DOMString,
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub enum ParsingAlgorithm {
Normal,
Fragment,
}
impl ElementAttribute {
pub fn new(name: QualName, value: DOMString) -> ElementAttribute {
ElementAttribute {
name: name,
value: value,
}
}
}
impl ServoParser {
pub fn parser_is_not_active(&self) -> bool {
self.can_write() || self.tokenizer.try_borrow_mut().is_ok()
}
pub fn parse_html_document(document: &Document, input: DOMString, url: ServoUrl) {
let parser = if pref!(dom.servoparser.async_html_tokenizer.enabled) {
ServoParser::new(
document,
Tokenizer::AsyncHtml(self::async_html::Tokenizer::new(document, url, None)),
LastChunkState::NotReceived,
ParserKind::Normal,
)
} else {
ServoParser::new(
document,
Tokenizer::Html(self::html::Tokenizer::new(
document,
url,
None,
ParsingAlgorithm::Normal,
)),
LastChunkState::NotReceived,
ParserKind::Normal,
)
};
parser.parse_string_chunk(String::from(input));
}
// https://html.spec.whatwg.org/multipage/#parsing-html-fragments
pub fn parse_html_fragment(
context: &Element,
input: DOMString,
) -> impl Iterator<Item = DomRoot<Node>> {
let context_node = context.upcast::<Node>();
let context_document = context_node.owner_doc();
let window = context_document.window();
let url = context_document.url();
// Step 1.
let loader = DocumentLoader::new_with_threads(
context_document.loader().resource_threads().clone(),
Some(url.clone()),
);
let document = Document::new(
window,
HasBrowsingContext::No,
Some(url.clone()),
context_document.origin().clone(),
IsHTMLDocument::HTMLDocument,
None,
None,
DocumentActivity::Inactive,
DocumentSource::FromParser,
loader,
None,
None,
Default::default(),
);
// Step 2.
document.set_quirks_mode(context_document.quirks_mode());
// Step 11.
let form = context_node
.inclusive_ancestors(ShadowIncluding::No)
.find(|element| element.is::<HTMLFormElement>());
let fragment_context = FragmentContext {
context_elem: context_node,
form_elem: form.deref(),
};
let parser = ServoParser::new(
&document,
Tokenizer::Html(self::html::Tokenizer::new(
&document,
url,
Some(fragment_context),
ParsingAlgorithm::Fragment,
)),
LastChunkState::Received,
ParserKind::Normal,
);
parser.parse_string_chunk(String::from(input));
// Step 14.
let root_element = document.GetDocumentElement().expect("no document element");
FragmentParsingResult {
inner: root_element.upcast::<Node>().children(),
}
}
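// A hypothetical caller (a sketch, not the actual Servo call site):
// innerHTML-style APIs parse the markup against a context element and then
// adopt each yielded node, which `FragmentParsingResult::next` has already
// detached from the temporary document:
//
//     for node in ServoParser::parse_html_fragment(context_elem, markup) {
//         fragment.AppendChild(&node).unwrap();
//     }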
pub fn parse_html_script_input(document: &Document, url: ServoUrl) {
let parser = ServoParser::new(
document,
Tokenizer::Html(self::html::Tokenizer::new(
document,
url,
None,
ParsingAlgorithm::Normal,
)),
LastChunkState::NotReceived,
ParserKind::ScriptCreated,
);
document.set_current_parser(Some(&parser));
}
pub fn parse_xml_document(document: &Document, input: DOMString, url: ServoUrl) {
let parser = ServoParser::new(
document,
Tokenizer::Xml(self::xml::Tokenizer::new(document, url)),
LastChunkState::NotReceived,
ParserKind::Normal,
);
parser.parse_string_chunk(String::from(input));
}
pub fn script_nesting_level(&self) -> usize {
self.script_nesting_level.get()
}
pub fn is_script_created(&self) -> bool {
self.script_created_parser
}
/// Corresponds to the latter part of the "Otherwise" branch of the 'An end
/// tag whose tag name is "script"' of
/// <https://html.spec.whatwg.org/multipage/#parsing-main-incdata>
///
/// This first moves everything from the script input to the beginning of
/// the network input, effectively resetting the insertion point to just
/// before the next character to be consumed.
///
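/// Before the move, the streams look like this (an illustrative sketch
/// added here for clarity):
///
/// ```text
/// ... script input ... | ... network input ...
///                      ^
///               insertion point
/// ```
///
/// After the move: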
///
/// ```text
/// | ... script input ... network input ...
/// ^
/// insertion point
/// ```
pub fn resume_with_pending_parsing_blocking_script(
&self,
script: &HTMLScriptElement,
result: ScriptResult,
) {
assert!(self.suspended.get());
self.suspended.set(false);
mem::swap(
&mut *self.script_input.borrow_mut(),
&mut *self.network_input.borrow_mut(),
);
while let Some(chunk) = self.script_input.borrow_mut().pop_front() {
self.network_input.borrow_mut().push_back(chunk);
}
let script_nesting_level = self.script_nesting_level.get();
assert_eq!(script_nesting_level, 0);
self.script_nesting_level.set(script_nesting_level + 1);
script.execute(result);
self.script_nesting_level.set(script_nesting_level);
if !self.suspended.get() {
self.parse_sync();
}
}
pub fn can_write(&self) -> bool {
self.script_created_parser || self.script_nesting_level.get() > 0
}
/// Steps 6-8 of https://html.spec.whatwg.org/multipage/#document.write()
pub fn write(&self, text: Vec<DOMString>) {
assert!(self.can_write());
if self.document.has_pending_parsing_blocking_script() {
// There is already a pending parsing blocking script so the
// parser is suspended, we just append everything to the
// script input and abort these steps.
for chunk in text {
self.script_input
.borrow_mut()
.push_back(String::from(chunk).into());
}
return;
}
// There is no pending parsing blocking script, so all previous calls
// to document.write() should have seen their entire input tokenized
// and processed, with nothing pushed to the parser script input.
assert!(self.script_input.borrow().is_empty());
let mut input = BufferQueue::new();
for chunk in text {
input.push_back(String::from(chunk).into());
}
self.tokenize(|tokenizer| tokenizer.feed(&mut input));
if self.suspended.get() {
// Parser got suspended, insert remaining input at end of
// script input, following anything written by scripts executed
// reentrantly during this call.
while let Some(chunk) = input.pop_front() {
self.script_input.borrow_mut().push_back(chunk);
}
return;
}
assert!(input.is_empty());
}
// Steps 4-6 of https://html.spec.whatwg.org/multipage/#dom-document-close
pub fn close(&self) {
assert!(self.script_created_parser);
// Step 4.
self.last_chunk_received.set(true);
if self.suspended.get() {
// Step 5.
return;
}
// Step 6.
self.parse_sync();
}
// https://html.spec.whatwg.org/multipage/#abort-a-parser
pub fn abort(&self) {
assert!(!self.aborted.get());
self.aborted.set(true);
// Step 1.
*self.script_input.borrow_mut() = BufferQueue::new();
*self.network_input.borrow_mut() = BufferQueue::new();
// Step 2.
self.document
.set_ready_state(DocumentReadyState::Interactive);
// Step 3.
self.tokenizer.borrow_mut().end();
self.document.set_current_parser(None);
// Step 4.
self.document.set_ready_state(DocumentReadyState::Complete);
}
// https://html.spec.whatwg.org/multipage/#active-parser
pub fn is_active(&self) -> bool {
self.script_nesting_level() > 0 && !self.aborted.get()
}
#[allow(unrooted_must_root)]
fn new_inherited(
document: &Document,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState,
kind: ParserKind,
) -> Self {
ServoParser {
reflector: Reflector::new(),
document: Dom::from_ref(document),
network_decoder: DomRefCell::new(Some(NetworkDecoder::new(document.encoding()))),
network_input: DomRefCell::new(BufferQueue::new()),
script_input: DomRefCell::new(BufferQueue::new()),
tokenizer: DomRefCell::new(tokenizer),
last_chunk_received: Cell::new(last_chunk_state == LastChunkState::Received),
suspended: Default::default(),
script_nesting_level: Default::default(),
aborted: Default::default(),
script_created_parser: kind == ParserKind::ScriptCreated,
}
}
#[allow(unrooted_must_root)]
fn new(
document: &Document,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState,
kind: ParserKind,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(ServoParser::new_inherited(
document,
tokenizer,
last_chunk_state,
kind,
)),
document.window(),
ServoParserBinding::Wrap,
)
}
fn push_bytes_input_chunk(&self, chunk: Vec<u8>) {
let chunk = self
.network_decoder
.borrow_mut()
.as_mut()
.unwrap()
.decode(chunk);
if !chunk.is_empty() {
self.network_input.borrow_mut().push_back(chunk);
}
}
fn push_string_input_chunk(&self, chunk: String) {
self.network_input.borrow_mut().push_back(chunk.into());
}
fn parse_sync(&self) {
let metadata = TimerMetadata {
url: self.document.url().as_str().into(),
iframe: TimerMetadataFrameType::RootWindow,
incremental: TimerMetadataReflowType::FirstReflow,
};
let profiler_category = self.tokenizer.borrow().profiler_category();
profile(
profiler_category,
Some(metadata),
self.document
.window()
.upcast::<GlobalScope>()
.time_profiler_chan()
.clone(),
|| self.do_parse_sync(),
)
}
fn do_parse_sync(&self) {
assert!(self.script_input.borrow().is_empty());
// This parser will continue to consume pending network input unless
// it becomes suspended (e.g. by a parsing-blocking script).
if self.last_chunk_received.get() {
if let Some(decoder) = self.network_decoder.borrow_mut().take() {
let chunk = decoder.finish();
if !chunk.is_empty() {
self.network_input.borrow_mut().push_back(chunk);
}
}
}
self.tokenize(|tokenizer| tokenizer.feed(&mut *self.network_input.borrow_mut()));
if self.suspended.get() {
return;
}
assert!(self.network_input.borrow().is_empty());
if self.last_chunk_received.get() {
self.finish();
}
}
fn parse_string_chunk(&self, input: String) {
self.document.set_current_parser(Some(self));
self.push_string_input_chunk(input);
if !self.suspended.get() {
self.parse_sync();
}
}
fn parse_bytes_chunk(&self, input: Vec<u8>) {
self.document.set_current_parser(Some(self));
self.push_bytes_input_chunk(input);
if !self.suspended.get() {
self.parse_sync();
}
}
fn tokenize<F>(&self, mut feed: F)
where
F: FnMut(&mut Tokenizer) -> Result<(), DomRoot<HTMLScriptElement>>,
{
loop {
assert!(!self.suspended.get());
assert!(!self.aborted.get());
self.document.reflow_if_reflow_timer_expired();
let script = match feed(&mut *self.tokenizer.borrow_mut()) {
Ok(()) => return,
Err(script) => script,
};
let script_nesting_level = self.script_nesting_level.get();
self.script_nesting_level.set(script_nesting_level + 1);
script.prepare();
self.script_nesting_level.set(script_nesting_level);
if self.document.has_pending_parsing_blocking_script() {
self.suspended.set(true);
return;
}
if self.aborted.get() {
return;
}
}
}
// https://html.spec.whatwg.org/multipage/#the-end
fn finish(&self) {
assert!(!self.suspended.get());
assert!(self.last_chunk_received.get());
assert!(self.script_input.borrow().is_empty());
assert!(self.network_input.borrow().is_empty());
assert!(self.network_decoder.borrow().is_none());
// Step 1.
self.document
.set_ready_state(DocumentReadyState::Interactive);
// Step 2.
self.tokenizer.borrow_mut().end();
self.document.set_current_parser(None);
// Steps 3-12 are in another castle, namely finish_load.
let url = self.tokenizer.borrow().url().clone();
self.document.finish_load(LoadType::PageSource(url));
}
}
struct FragmentParsingResult<I>
where
I: Iterator<Item = DomRoot<Node>>,
{
inner: I,
}
impl<I> Iterator for FragmentParsingResult<I>
where
I: Iterator<Item = DomRoot<Node>>,
{
type Item = DomRoot<Node>;
fn next(&mut self) -> Option<DomRoot<Node>> {
let next = self.inner.next()?;
next.remove_self();
Some(next)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[derive(JSTraceable, MallocSizeOf, PartialEq)]
enum ParserKind {
Normal,
ScriptCreated,
}
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
enum Tokenizer {
Html(self::html::Tokenizer),
AsyncHtml(self::async_html::Tokenizer),
Xml(self::xml::Tokenizer),
}
impl Tokenizer {
fn feed(&mut self, input: &mut BufferQueue) -> Result<(), DomRoot<HTMLScriptElement>> {
match *self {
Tokenizer::Html(ref mut tokenizer) => tokenizer.feed(input),
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.feed(input),
Tokenizer::Xml(ref mut tokenizer) => tokenizer.feed(input),
}
}
fn end(&mut self) {
match *self {
Tokenizer::Html(ref mut tokenizer) => tokenizer.end(),
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.end(),
Tokenizer::Xml(ref mut tokenizer) => tokenizer.end(),
}
}
fn url(&self) -> &ServoUrl {
match *self {
Tokenizer::Html(ref tokenizer) => tokenizer.url(),
Tokenizer::AsyncHtml(ref tokenizer) => tokenizer.url(),
Tokenizer::Xml(ref tokenizer) => tokenizer.url(),
}
}
fn set_plaintext_state(&mut self) {
match *self {
Tokenizer::Html(ref mut tokenizer) => tokenizer.set_plaintext_state(),
Tokenizer::AsyncHtml(ref mut tokenizer) => tokenizer.set_plaintext_state(),
Tokenizer::Xml(_) => unimplemented!(),
}
}
fn profiler_category(&self) -> ProfilerCategory {
match *self {
Tokenizer::Html(_) => ProfilerCategory::ScriptParseHTML,
Tokenizer::AsyncHtml(_) => ProfilerCategory::ScriptParseHTML,
Tokenizer::Xml(_) => ProfilerCategory::ScriptParseXML,
}
}
}
/// The context required for asynchronously fetching a document
/// and parsing it progressively.
#[derive(JSTraceable)]
pub struct ParserContext {
/// The parser that initiated the request.
parser: Option<Trusted<ServoParser>>,
/// Is this a synthesized document
is_synthesized_document: bool,
/// The pipeline associated with this document.
id: PipelineId,
/// The URL for this document.
url: ServoUrl,
/// timing data for this resource
resource_timing: ResourceFetchTiming,
}
impl ParserContext {
pub fn new(id: PipelineId, url: ServoUrl) -> ParserContext {
ParserContext {
parser: None,
is_synthesized_document: false,
id: id,
url: url,
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Navigation),
}
}
}
impl FetchResponseListener for ParserContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, meta_result: Result<FetchMetadata, NetworkError>) {
let mut ssl_error = None;
let mut network_error = None;
let metadata = match meta_result {
Ok(meta) => Some(match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
}),
Err(NetworkError::SslValidation(url, reason)) => {
ssl_error = Some(reason);
let mut meta = Metadata::default(url);
let mime: Option<Mime> = "text/html".parse().ok();
meta.set_content_type(mime.as_ref());
Some(meta)
},
Err(NetworkError::Internal(reason)) => {
network_error = Some(reason);
let mut meta = Metadata::default(self.url.clone());
let mime: Option<Mime> = "text/html".parse().ok();
meta.set_content_type(mime.as_ref());
Some(meta)
},
Err(_) => None,
};
let content_type: Option<Mime> = metadata
.clone()
.and_then(|meta| meta.content_type)
.map(Serde::into_inner)
.map(Into::into);
let parser = match ScriptThread::page_headers_available(&self.id, metadata) {
Some(parser) => parser,
None => return,
};
if parser.aborted.get() {
return;
}
self.parser = Some(Trusted::new(&*parser));
match content_type {
Some(ref mime) if mime.type_() == mime::IMAGE => {
self.is_synthesized_document = true;
let page = "<html><body></body></html>".into();
parser.push_string_input_chunk(page);
parser.parse_sync();
let doc = &parser.document;
let doc_body = DomRoot::upcast::<Node>(doc.GetBody().unwrap());
let img = HTMLImageElement::new(local_name!("img"), None, doc);
img.SetSrc(USVString(self.url.to_string()));
doc_body
.AppendChild(&DomRoot::upcast::<Node>(img))
.expect("Appending failed");
},
Some(ref mime) if mime.type_() == mime::TEXT && mime.subtype() == mime::PLAIN => {
// https://html.spec.whatwg.org/multipage/#read-text
let page = "<pre>\n".into();
parser.push_string_input_chunk(page);
parser.parse_sync();
parser.tokenizer.borrow_mut().set_plaintext_state();
},
Some(ref mime) if mime.type_() == mime::TEXT && mime.subtype() == mime::HTML => {
// Handle text/html
if let Some(reason) = ssl_error {
self.is_synthesized_document = true;
let page = resources::read_string(Resource::BadCertHTML);
let page = page.replace("${reason}", &reason);
parser.push_string_input_chunk(page);
parser.parse_sync();
}
if let Some(reason) = network_error {
self.is_synthesized_document = true;
let page = resources::read_string(Resource::NetErrorHTML);
let page = page.replace("${reason}", &reason);
parser.push_string_input_chunk(page);
parser.parse_sync();
}
},
// Handle text/xml, application/xml
Some(ref mime)
if (mime.type_() == mime::TEXT && mime.subtype() == mime::XML) ||
(mime.type_() == mime::APPLICATION && mime.subtype() == mime::XML) => {},
Some(ref mime)
if mime.type_() == mime::APPLICATION &&
mime.subtype().as_str() == "xhtml" &&
mime.suffix() == Some(mime::XML) => {}, // Handle xhtml (application/xhtml+xml)
Some(ref mime) => {
// Show warning page for unknown mime types.
let page = format!(
"<html><body><p>Unknown content type ({}/{}).</p></body></html>",
mime.type_().as_str(),
mime.subtype().as_str()
);
self.is_synthesized_document = true;
parser.push_string_input_chunk(page);
parser.parse_sync();
},
None => {
// No content-type header.
// Merge with #4212 when fixed.
},
}
}
fn process_response_chunk(&mut self, payload: Vec<u8>) {
if self.is_synthesized_document {
return;
}
let parser = match self.parser.as_ref() {
Some(parser) => parser.root(),
None => return,
};
if parser.aborted.get() {
return;
}
parser.parse_bytes_chunk(payload);
}
// This method is called via script_thread::handle_fetch_eof, so we must call
// submit_resource_timing in this function
// Resource listeners are called via net_traits::Action::process, which handles submission for them
fn process_response_eof(&mut self, status: Result<ResourceFetchTiming, NetworkError>) {
let parser = match self.parser.as_ref() {
Some(parser) => parser.root(),
None => return,
};
if parser.aborted.get() {
return;
}
match status {
// are we throwing this away or can we use it?
Ok(_) => (),
// TODO(Savago): we should send a notification to callers #5463.
Err(err) => debug!("Failed to load page URL {}, error: {:?}", self.url, err),
}
parser
.document
.set_redirect_count(self.resource_timing.redirect_count);
parser.last_chunk_received.set(true);
if !parser.suspended.get() {
parser.parse_sync();
}
//TODO only submit if this is the current document resource
self.submit_resource_timing();
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
// store a PerformanceNavigationTiming entry in the globalscope's Performance buffer
fn submit_resource_timing(&mut self) {
let parser = match self.parser.as_ref() {
Some(parser) => parser.root(),
None => return,
};
if parser.aborted.get() {
return;
}
let document = &parser.document;
//TODO nav_start and nav_start_precise
let performance_entry =
PerformanceNavigationTiming::new(&document.global(), 0, 0, &document);
document
.global()
.performance()
.queue_entry(performance_entry.upcast::<PerformanceEntry>(), true);
}
}
impl PreInvoke for ParserContext {}
pub struct FragmentContext<'a> {
pub context_elem: &'a Node,
pub form_elem: Option<&'a Node>,
}
#[allow(unrooted_must_root)]
fn insert(parent: &Node, reference_child: Option<&Node>, child: NodeOrText<Dom<Node>>) {
match child {
NodeOrText::AppendNode(n) => {
parent.InsertBefore(&n, reference_child).unwrap();
},
NodeOrText::AppendText(t) => {
let text = reference_child
.and_then(Node::GetPreviousSibling)
.or_else(|| parent.GetLastChild())
.and_then(DomRoot::downcast::<Text>);
if let Some(text) = text {
text.upcast::<CharacterData>().append_data(&t);
} else {
let text = Text::new(String::from(t).into(), &parent.owner_doc());
parent.InsertBefore(text.upcast(), reference_child).unwrap();
}
},
}
}
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct Sink {
base_url: ServoUrl,
document: Dom<Document>,
current_line: u64,
script: MutNullableDom<HTMLScriptElement>,
parsing_algorithm: ParsingAlgorithm,
}
impl Sink {
fn same_tree(&self, x: &Dom<Node>, y: &Dom<Node>) -> bool {
let x = x.downcast::<Element>().expect("Element node expected");
let y = y.downcast::<Element>().expect("Element node expected");
x.is_in_same_home_subtree(y)
}
fn has_parent_node(&self, node: &Dom<Node>) -> bool {
node.GetParentNode().is_some()
}
}
#[allow(unrooted_must_root)] // FIXME: really?
impl TreeSink for Sink {
type Output = Self;
fn finish(self) -> Self {
self
}
type Handle = Dom<Node>;
fn get_document(&mut self) -> Dom<Node> {
Dom::from_ref(self.document.upcast())
}
fn get_template_contents(&mut self, target: &Dom<Node>) -> Dom<Node> {
let template = target
.downcast::<HTMLTemplateElement>()
.expect("tried to get template contents of non-HTMLTemplateElement in HTML parsing");
Dom::from_ref(template.Content().upcast())
}
fn same_node(&self, x: &Dom<Node>, y: &Dom<Node>) -> bool {
x == y
}
fn elem_name<'a>(&self, target: &'a Dom<Node>) -> ExpandedName<'a> {
let elem = target
.downcast::<Element>()
.expect("tried to get name of non-Element in HTML parsing");
ExpandedName {
ns: elem.namespace(),
local: elem.local_name(),
}
}
fn create_element(
&mut self,
name: QualName,
attrs: Vec<Attribute>,
_flags: ElementFlags,
) -> Dom<Node> {
let attrs = attrs
.into_iter()
.map(|attr| ElementAttribute::new(attr.name, DOMString::from(String::from(attr.value))))
.collect();
let element = create_element_for_token(
name,
attrs,
&*self.document,
ElementCreator::ParserCreated(self.current_line),
self.parsing_algorithm,
);
Dom::from_ref(element.upcast())
}
fn create_comment(&mut self, text: StrTendril) -> Dom<Node> {
let comment = Comment::new(DOMString::from(String::from(text)), &*self.document);
Dom::from_ref(comment.upcast())
}
fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Dom<Node> {
let doc = &*self.document;
let pi = ProcessingInstruction::new(
DOMString::from(String::from(target)),
DOMString::from(String::from(data)),
doc,
);
Dom::from_ref(pi.upcast())
}
fn associate_with_form(
&mut self,
target: &Dom<Node>,
form: &Dom<Node>,
nodes: (&Dom<Node>, Option<&Dom<Node>>),
) {
let (element, prev_element) = nodes;
let tree_node = prev_element.map_or(element, |prev| {
if self.has_parent_node(element) {
element
} else {
prev
}
});
if !self.same_tree(tree_node, form) {
return;
}
let node = target;
let form = DomRoot::downcast::<HTMLFormElement>(DomRoot::from_ref(&**form))
.expect("Owner must be a form element");
let elem = node.downcast::<Element>();
let control = elem.and_then(|e| e.as_maybe_form_control());
if let Some(control) = control {
control.set_form_owner_from_parser(&form);
} else {
// TODO remove this code when keygen is implemented.
assert_eq!(
node.NodeName(),
"KEYGEN",
"Unknown form-associatable element"
);
}
}
fn append_before_sibling(&mut self, sibling: &Dom<Node>, new_node: NodeOrText<Dom<Node>>) {
let parent = sibling
.GetParentNode()
.expect("append_before_sibling called on node without parent");
insert(&parent, Some(&*sibling), new_node);
}
fn parse_error(&mut self, msg: Cow<'static, str>) {
debug!("Parse error: {}", msg);
}
fn set_quirks_mode(&mut self, mode: QuirksMode) {
let mode = match mode {
QuirksMode::Quirks => ServoQuirksMode::Quirks,
QuirksMode::LimitedQuirks => ServoQuirksMode::LimitedQuirks,
QuirksMode::NoQuirks => ServoQuirksMode::NoQuirks,
};
self.document.set_quirks_mode(mode);
}
fn append(&mut self, parent: &Dom<Node>, child: NodeOrText<Dom<Node>>) {
insert(&parent, None, child);
}
fn append_based_on_parent_node(
&mut self,
elem: &Dom<Node>,
prev_elem: &Dom<Node>,
child: NodeOrText<Dom<Node>>,
) {
if self.has_parent_node(elem) {
self.append_before_sibling(elem, child);
} else {
self.append(prev_elem, child);
}
}
fn append_doctype_to_document(
&mut self,
name: StrTendril,
public_id: StrTendril,
system_id: StrTendril,
) {
let doc = &*self.document;
let doctype = DocumentType::new(
DOMString::from(String::from(name)),
Some(DOMString::from(String::from(public_id))),<|fim▁hole|> doc.upcast::<Node>()
.AppendChild(doctype.upcast())
.expect("Appending failed");
}
fn add_attrs_if_missing(&mut self, target: &Dom<Node>, attrs: Vec<Attribute>) {
let elem = target
.downcast::<Element>()
.expect("tried to set attrs on non-Element in HTML parsing");
for attr in attrs {
elem.set_attribute_from_parser(
attr.name,
DOMString::from(String::from(attr.value)),
None,
);
}
}
fn remove_from_parent(&mut self, target: &Dom<Node>) {
if let Some(ref parent) = target.GetParentNode() {
parent.RemoveChild(&*target).unwrap();
}
}
fn mark_script_already_started(&mut self, node: &Dom<Node>) {
let script = node.downcast::<HTMLScriptElement>();
script.map(|script| script.set_already_started(true));
}
fn complete_script(&mut self, node: &Dom<Node>) -> NextParserState {
if let Some(script) = node.downcast() {
self.script.set(Some(script));
NextParserState::Suspend
} else {
NextParserState::Continue
}
}
fn reparent_children(&mut self, node: &Dom<Node>, new_parent: &Dom<Node>) {
while let Some(ref child) = node.GetFirstChild() {
new_parent.AppendChild(&child).unwrap();
}
}
/// <https://html.spec.whatwg.org/multipage/#html-integration-point>
/// Specifically, the <annotation-xml> cases.
fn is_mathml_annotation_xml_integration_point(&self, handle: &Dom<Node>) -> bool {
let elem = handle.downcast::<Element>().unwrap();
elem.get_attribute(&ns!(), &local_name!("encoding"))
.map_or(false, |attr| {
attr.value().eq_ignore_ascii_case("text/html") ||
attr.value().eq_ignore_ascii_case("application/xhtml+xml")
})
}
fn set_current_line(&mut self, line_number: u64) {
self.current_line = line_number;
}
fn pop(&mut self, node: &Dom<Node>) {
let node = DomRoot::from_ref(&**node);
vtable_for(&node).pop();
}
}
/// https://html.spec.whatwg.org/multipage/#create-an-element-for-the-token
fn create_element_for_token(
name: QualName,
attrs: Vec<ElementAttribute>,
document: &Document,
creator: ElementCreator,
parsing_algorithm: ParsingAlgorithm,
) -> DomRoot<Element> {
// Step 3.
let is = attrs
.iter()
.find(|attr| attr.name.local.eq_str_ignore_ascii_case("is"))
.map(|attr| LocalName::from(&*attr.value));
// Step 4.
let definition = document.lookup_custom_element_definition(&name.ns, &name.local, is.as_ref());
// Step 5.
let will_execute_script =
definition.is_some() && parsing_algorithm != ParsingAlgorithm::Fragment;
// Step 6.
if will_execute_script {
// Step 6.1.
document.increment_throw_on_dynamic_markup_insertion_counter();
// Step 6.2
if is_execution_stack_empty() {
document
.window()
.upcast::<GlobalScope>()
.perform_a_microtask_checkpoint();
}
// Step 6.3
ScriptThread::push_new_element_queue()
}
// Step 7.
let creation_mode = if will_execute_script {
CustomElementCreationMode::Synchronous
} else {
CustomElementCreationMode::Asynchronous
};
let element = Element::create(name, is, document, creator, creation_mode);
// Step 8.
for attr in attrs {
element.set_attribute_from_parser(attr.name, attr.value, None);
}
// Step 9.
if will_execute_script {
// Steps 9.1 - 9.2.
ScriptThread::pop_current_element_queue();
// Step 9.3.
document.decrement_throw_on_dynamic_markup_insertion_counter();
}
// TODO: Step 10.
// TODO: Step 11.
// Step 12 is handled in `associate_with_form`.
// Step 13.
element
}
#[derive(JSTraceable, MallocSizeOf)]
struct NetworkDecoder {
#[ignore_malloc_size_of = "Defined in tendril"]
decoder: LossyDecoder<NetworkSink>,
}
impl NetworkDecoder {
fn new(encoding: &'static Encoding) -> Self {
Self {
decoder: LossyDecoder::new_encoding_rs(encoding, Default::default()),
}
}
fn decode(&mut self, chunk: Vec<u8>) -> StrTendril {
self.decoder.process(ByteTendril::from(&*chunk));
mem::replace(
&mut self.decoder.inner_sink_mut().output,
Default::default(),
)
}
fn finish(self) -> StrTendril {
self.decoder.finish()
}
}
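// Illustrative behaviour (an assumption based on LossyDecoder semantics,
// not asserted by the original code): a chunk that ends part-way through a
// multi-byte sequence yields no output until the remaining bytes arrive,
// and `finish()` flushes any dangling partial sequence as U+FFFD.
//
//     let mut d = NetworkDecoder::new(encoding_rs::UTF_8);
//     let out = d.decode(vec![0xE2, 0x82]); // first two bytes of '€'
//     // out is empty; d.decode(vec![0xAC]) would now yield "€"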
#[derive(Default, JSTraceable)]
struct NetworkSink {
output: StrTendril,
}
impl TendrilSink<UTF8> for NetworkSink {
type Output = StrTendril;
fn process(&mut self, t: StrTendril) {
if self.output.is_empty() {
self.output = t;
} else {
self.output.push_tendril(&t);
}
}
fn error(&mut self, _desc: Cow<'static, str>) {}
fn finish(self) -> Self::Output {
self.output
}
}<|fim▁end|> | Some(DOMString::from(String::from(system_id))),
doc,
); |
<|file_name|>file.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <[email protected]>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: file
version_added: historical
short_description: Manage files and file properties
extends_documentation_fragment: files
description:
- Set attributes of files, symlinks or directories.
- Alternatively, remove files, symlinks or directories.
- Many other modules support the same options as the C(file) module - including M(copy), M(template), and M(assemble).
- For Windows targets, use the M(win_file) module instead.
options:
path:
description:
- Path to the file being managed.
type: path
required: yes
aliases: [ dest, name ]
state:
description:
- If C(absent), directories will be recursively deleted, and files or symlinks will
be unlinked. In the case of a directory, if C(diff) is declared, you will see the files and folders deleted listed
under C(path_content). Note that C(absent) will not cause C(file) to fail if the C(path) does
not exist, as the state did not change.
- If C(directory), all intermediate subdirectories will be created if they
do not exist. Since Ansible 1.7 they will be created with the supplied permissions.
- If C(file), without any other options this works mostly as a 'stat' and will return the current state of C(path).
Even with other options (i.e. C(mode)), the file will be modified but will NOT be created if it does not exist;
see the C(touch) value or the M(copy) or M(template) module if you want that behavior.
- If C(hard), the hard link will be created or changed.
- If C(link), the symbolic link will be created or changed.
- If C(touch) (new in 1.4), an empty file will be created if the C(path) does not
exist, while an existing file or directory will receive updated file access and
modification times (similar to the way C(touch) works from the command line).
type: str
default: file
choices: [ absent, directory, file, hard, link, touch ]
src:
description:
- Path of the file to link to.
- This applies only to C(state=link) and C(state=hard).
- For C(state=link), this will also accept a non-existing path.
- Relative paths are relative to the file being created (C(path)) which is how
the Unix command C(ln -s SRC DEST) treats relative paths.
type: path
recurse:
description:
- Recursively set the specified file attributes on directory contents.
- This applies only when C(state) is set to C(directory).
type: bool
default: no
version_added: '1.1'
force:
description:
- >
Force the creation of the symlinks in two cases: the source file does
not exist (but will appear later); the destination exists and is a file (so we need to unlink the
C(path) file and create a symlink to the C(src) file in place of it).
type: bool
default: no
follow:
description:
- This flag indicates that filesystem links, if they exist, should be followed.
- Prior to Ansible 2.5, this was C(no) by default.
type: bool
default: yes
version_added: '1.8'
modification_time:
description:
- This parameter indicates the time the file's modification time should be set to.
- Should be C(preserve) when no modification is required, C(YYYYMMDDHHMM.SS) when using default time format, or C(now).
- Default is C(None) meaning that C(preserve) is the default for C(state=[file,directory,link,hard]) and C(now) is default for C(state=touch).
type: str
version_added: "2.7"
modification_time_format:
description:
- When used with C(modification_time), indicates the time format that must be used.
- Based on default Python format (see time.strftime doc).
type: str
default: "%Y%m%d%H%M.%S"
version_added: '2.7'
access_time:
description:
- This parameter indicates the time the file's access time should be set to.
- Should be C(preserve) when no modification is required, C(YYYYMMDDHHMM.SS) when using default time format, or C(now).
- Default is C(None) meaning that C(preserve) is the default for C(state=[file,directory,link,hard]) and C(now) is default for C(state=touch).
type: str
version_added: '2.7'
access_time_format:
description:
- When used with C(access_time), indicates the time format that must be used.
- Based on default Python format (see time.strftime doc).
type: str
default: "%Y%m%d%H%M.%S"
version_added: '2.7'
seealso:
- module: assemble
- module: copy
- module: stat
- module: template
- module: win_file
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = r'''
- name: Change file ownership, group and permissions
file:
path: /etc/foo.conf
owner: foo
group: foo
mode: '0644'
- name: Give insecure permissions to an existing file
file:
path: /work
owner: root
group: root
mode: '1777'
- name: Create a symbolic link
file:
src: /file/to/link/to
dest: /path/to/symlink
owner: foo
group: foo
state: link
- name: Create two hard links
file:
src: '/tmp/{{ item.src }}'
dest: '{{ item.dest }}'
state: hard
loop:
- { src: x, dest: y }
- { src: z, dest: k }
- name: Touch a file, using symbolic modes to set the permissions (equivalent to 0644)
file:
path: /etc/foo.conf
state: touch
mode: u=rw,g=r,o=r
- name: Touch the same file, but add/remove some permissions
file:
path: /etc/foo.conf
state: touch
mode: u+rw,g-wx,o-rwx
- name: Touch again the same file, but do not change times; this makes the task idempotent
file:
path: /etc/foo.conf
state: touch
mode: u+rw,g-wx,o-rwx
modification_time: preserve
access_time: preserve
- name: Create a directory if it does not exist
file:
path: /etc/some_directory
state: directory
mode: '0755'
- name: Update modification and access time of given file
file:
path: /etc/some_file
state: file
modification_time: now
access_time: now
- name: Set access time based on seconds from epoch value
file:
path: /etc/another_file
state: file
access_time: '{{ "%Y%m%d%H%M.%S" | strftime(stat_var.stat.atime) }}'
- name: Recursively change ownership of a directory
file:
path: /etc/foo
state: directory
recurse: yes
owner: foo
group: foo
- name: Remove file (delete file)
file:
path: /etc/foo.txt
state: absent
- name: Recursively remove directory
file:
path: /etc/foo
state: absent
'''
RETURN = r'''
'''
import errno
import os
import shutil
import sys
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes, to_native
# There will only be a single AnsibleModule object per module
module = None
class AnsibleModuleError(Exception):
def __init__(self, results):
self.results = results
def __repr__(self):
return 'AnsibleModuleError(results={0})'.format(self.results)
class ParameterError(AnsibleModuleError):
pass
class Sentinel(object):
def __new__(cls, *args, **kwargs):
return cls
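# Sentinel acts as a singleton marker distinct from None: overriding __new__
# to return the class itself means every "instantiation" yields the same
# object, so identity checks such as `mtime is Sentinel` work. In this
# module, None means "preserve the existing timestamp" and Sentinel means
# "use the current time".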
def _ansible_excepthook(exc_type, exc_value, tb):
# Using an exception allows us to catch it if the calling code knows it can recover
if issubclass(exc_type, AnsibleModuleError):
module.fail_json(**exc_value.results)
else:
sys.__excepthook__(exc_type, exc_value, tb)
def additional_parameter_handling(params):
"""Additional parameter validation and reformatting"""
# When path is a directory, rewrite the pathname to be the file inside of the directory
# TODO: Why do we exclude link? Why don't we exclude directory? Should we exclude touch?
# I think this is where we want to be in the future:
# when isdir(path):
# if state == absent: Remove the directory
# if state == touch: Touch the directory
# if state == directory: Assert the directory is the same as the one specified
# if state == file: place inside of the directory (use _original_basename)
# if state == link: place inside of the directory (use _original_basename. Fallback to src?)
# if state == hard: place inside of the directory (use _original_basename. Fallback to src?)
if (params['state'] not in ("link", "absent") and os.path.isdir(to_bytes(params['path'], errors='surrogate_or_strict'))):
basename = None
if params['_original_basename']:
basename = params['_original_basename']
elif params['src']:
basename = os.path.basename(params['src'])
if basename:<|fim▁hole|> prev_state = get_state(to_bytes(params['path'], errors='surrogate_or_strict'))
if params['state'] is None:
if prev_state != 'absent':
params['state'] = prev_state
elif params['recurse']:
params['state'] = 'directory'
else:
params['state'] = 'file'
# make sure the target path is a directory when we're doing a recursive operation
if params['recurse'] and params['state'] != 'directory':
raise ParameterError(results={"msg": "recurse option requires state to be 'directory'",
"path": params["path"]})
# Fail if 'src' but no 'state' is specified
if params['src'] and params['state'] not in ('link', 'hard'):
raise ParameterError(results={'msg': "src option requires state to be 'link' or 'hard'",
'path': params['path']})
def get_state(path):
''' Find out current state '''
b_path = to_bytes(path, errors='surrogate_or_strict')
try:
if os.path.lexists(b_path):
if os.path.islink(b_path):
return 'link'
elif os.path.isdir(b_path):
return 'directory'
elif os.stat(b_path).st_nlink > 1:
return 'hard'
# could be many other things, but defaulting to file
return 'file'
return 'absent'
except OSError as e:
if e.errno == errno.ENOENT: # It may already have been removed
return 'absent'
else:
raise
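# Illustrative results (following the checks above): a dangling symlink
# still returns 'link' because os.path.lexists() does not follow links; a
# regular file whose inode has st_nlink > 1 returns 'hard'; a missing path,
# or one removed between the check and the stat, returns 'absent'.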
# This should be moved into the common file utilities
def recursive_set_attributes(b_path, follow, file_args, mtime, atime):
changed = False
try:
for b_root, b_dirs, b_files in os.walk(b_path):
for b_fsobj in b_dirs + b_files:
b_fsname = os.path.join(b_root, b_fsobj)
if not os.path.islink(b_fsname):
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
else:
# Change perms on the link
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
if follow:
b_fsname = os.path.join(b_root, os.readlink(b_fsname))
# The link target could be nonexistent
if os.path.exists(b_fsname):
if os.path.isdir(b_fsname):
# Link is a directory so change perms on the directory's contents
changed |= recursive_set_attributes(b_fsname, follow, file_args, mtime, atime)
# Change perms on the file pointed to by the link
tmp_file_args = file_args.copy()
tmp_file_args['path'] = to_native(b_fsname, errors='surrogate_or_strict')
changed |= module.set_fs_attributes_if_different(tmp_file_args, changed, expand=False)
changed |= update_timestamp_for_file(tmp_file_args['path'], mtime, atime)
except RuntimeError as e:
# on Python3 "RecursionError" is raised which is derived from "RuntimeError"
# TODO once this function is moved into the common file utilities, this should probably raise a more general exception
raise AnsibleModuleError(
results={'msg': "Could not recursively set attributes on %s. Original error was: '%s'" % (to_native(b_path), to_native(e))}
)
return changed
def initial_diff(path, state, prev_state):
diff = {'before': {'path': path},
'after': {'path': path},
}
if prev_state != state:
diff['before']['state'] = prev_state
diff['after']['state'] = state
if state == 'absent' and prev_state == 'directory':
walklist = {
'directories': [],
'files': [],
}
b_path = to_bytes(path, errors='surrogate_or_strict')
for base_path, sub_folders, files in os.walk(b_path):
for folder in sub_folders:
folderpath = os.path.join(base_path, folder)
walklist['directories'].append(folderpath)
for filename in files:
filepath = os.path.join(base_path, filename)
walklist['files'].append(filepath)
diff['before']['path_content'] = walklist
return diff
#
# States
#
def get_timestamp_for_time(formatted_time, time_format):
if formatted_time == 'preserve':
return None
elif formatted_time == 'now':
return Sentinel
else:
try:
struct = time.strptime(formatted_time, time_format)
struct_time = time.mktime(struct)
except (ValueError, OverflowError) as e:
raise AnsibleModuleError(results={'msg': 'Error while obtaining timestamp for time %s using format %s: %s'
% (formatted_time, time_format, to_native(e, nonstring='simplerepr'))})
return struct_time
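# Illustrative behaviour (a sketch matching the branches above):
#
#   get_timestamp_for_time('preserve', '%Y%m%d%H%M.%S') -> None
#   get_timestamp_for_time('now', '%Y%m%d%H%M.%S') -> Sentinel
#   get_timestamp_for_time('202001021530.00', '%Y%m%d%H%M.%S')
#       -> seconds since the epoch for 2020-01-02 15:30:00 local time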
def update_timestamp_for_file(path, mtime, atime, diff=None):
b_path = to_bytes(path, errors='surrogate_or_strict')
try:
# When mtime and atime are set to 'now', rely on utime(path, None) which does not require ownership of the file
# https://github.com/ansible/ansible/issues/50943
if mtime is Sentinel and atime is Sentinel:
# It's not exact but we can't rely on os.stat(path).st_mtime after setting os.utime(path, None) as it may
# not be updated. Just use the current time for the diff values
mtime = atime = time.time()
previous_mtime = os.stat(b_path).st_mtime
previous_atime = os.stat(b_path).st_atime
set_time = None
else:
# If both parameters are None ('preserve'), there is nothing to do
if mtime is None and atime is None:
return False
previous_mtime = os.stat(b_path).st_mtime
previous_atime = os.stat(b_path).st_atime
if mtime is None:
mtime = previous_mtime
elif mtime is Sentinel:
mtime = time.time()
if atime is None:
atime = previous_atime
elif atime is Sentinel:
atime = time.time()
# If both timestamps are already ok, nothing to do
if mtime == previous_mtime and atime == previous_atime:
return False
set_time = (atime, mtime)
os.utime(b_path, set_time)
if diff is not None:
if 'before' not in diff:
diff['before'] = {}
if 'after' not in diff:
diff['after'] = {}
if mtime != previous_mtime:
diff['before']['mtime'] = previous_mtime
diff['after']['mtime'] = mtime
if atime != previous_atime:
diff['before']['atime'] = previous_atime
diff['after']['atime'] = atime
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while updating modification or access time: %s'
% to_native(e, nonstring='simplerepr'), 'path': path})
return True
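# Illustrative call (assumed): update_timestamp_for_file(path, Sentinel,
# Sentinel) sets both times to "now" via os.utime(path, None), which works
# even without owning the file; passing None for both is a no-op that
# returns False.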
def keep_backward_compatibility_on_timestamps(parameter, state):
if state in ['file', 'hard', 'directory', 'link'] and parameter is None:
return 'preserve'
elif state == 'touch' and parameter is None:
return 'now'
else:
return parameter
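# e.g. (illustrative): with state='touch' a None timestamp becomes 'now',
# with state='file' it becomes 'preserve', and an explicit user-supplied
# value is always returned unchanged.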
def execute_diff_peek(path):
"""Take a guess as to whether a file is a binary file"""
b_path = to_bytes(path, errors='surrogate_or_strict')
appears_binary = False
try:
with open(b_path, 'rb') as f:
head = f.read(8192)
except Exception:
# If we can't read the file, we're okay assuming it's text
pass
else:
if b"\x00" in head:
appears_binary = True
return appears_binary
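# Illustrative heuristic (as implemented above): any NUL byte within the
# first 8192 bytes marks the file as binary, so a compiled binary such as
# /bin/ls would typically return True while a text config file returns False.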
def ensure_absent(path):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
result = {}
if prev_state != 'absent':
diff = initial_diff(path, 'absent', prev_state)
if not module.check_mode:
if prev_state == 'directory':
try:
shutil.rmtree(b_path, ignore_errors=False)
except Exception as e:
raise AnsibleModuleError(results={'msg': "rmtree failed: %s" % to_native(e)})
else:
try:
os.unlink(b_path)
except OSError as e:
if e.errno != errno.ENOENT: # It may already have been removed
raise AnsibleModuleError(results={'msg': "unlinking failed: %s " % to_native(e),
'path': path})
result.update({'path': path, 'changed': True, 'diff': diff, 'state': 'absent'})
else:
result.update({'path': path, 'changed': False, 'state': 'absent'})
return result
def execute_touch(path, follow, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
changed = False
result = {'dest': path}
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
if not module.check_mode:
if prev_state == 'absent':
# Create an empty file if the filename did not already exist
try:
open(b_path, 'wb').close()
changed = True
except (OSError, IOError) as e:
raise AnsibleModuleError(results={'msg': 'Error, could not touch target: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
# Update the attributes on the file
diff = initial_diff(path, 'touch', prev_state)
file_args = module.load_file_common_arguments(module.params)
try:
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
except SystemExit as e:
if e.code:
# We take this to mean that fail_json() was called from
# somewhere in basic.py
if prev_state == 'absent':
# If we just created the file we can safely remove it
os.remove(b_path)
raise
result['changed'] = changed
result['diff'] = diff
return result
def ensure_file_attributes(path, follow, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
if prev_state != 'file':
if follow and prev_state == 'link':
# follow symlink and operate on original
b_path = os.path.realpath(b_path)
path = to_native(b_path, errors='strict')
prev_state = get_state(b_path)
file_args['path'] = path
if prev_state not in ('file', 'hard'):
# file is not absent and any other state is a conflict
raise AnsibleModuleError(results={'msg': 'file (%s) is %s, cannot continue' % (path, prev_state),
'path': path})
diff = initial_diff(path, 'file', prev_state)
changed = module.set_fs_attributes_if_different(file_args, False, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'path': path, 'changed': changed, 'diff': diff}
def ensure_directory(path, follow, recurse, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
# For followed symlinks, we need to operate on the target of the link
if follow and prev_state == 'link':
b_path = os.path.realpath(b_path)
path = to_native(b_path, errors='strict')
file_args['path'] = path
prev_state = get_state(b_path)
changed = False
diff = initial_diff(path, 'directory', prev_state)
if prev_state == 'absent':
# Create directory and assign permissions to it
if module.check_mode:
return {'changed': True, 'diff': diff}
curpath = ''
try:
# Split the path so we can apply filesystem attributes recursively
# from the root (/) directory for absolute paths or the base path
# of a relative path. We can then walk the appropriate directory
# path to apply attributes.
# Something like mkdir -p with mode applied to all of the newly created directories
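# For instance (an illustrative walk-through): path='/opt/app/logs' visits
# curpath='/opt', then '/opt/app', then '/opt/app/logs', creating whichever
# components are missing and applying the requested attributes to each one.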
for dirname in path.strip('/').split('/'):
curpath = '/'.join([curpath, dirname])
# Remove leading slash if we're creating a relative path
if not os.path.isabs(path):
curpath = curpath.lstrip('/')
b_curpath = to_bytes(curpath, errors='surrogate_or_strict')
if not os.path.exists(b_curpath):
try:
os.mkdir(b_curpath)
changed = True
except OSError as ex:
# Possibly something else created the dir since the os.path.exists
# check above. As long as it's a dir, we don't need to error out.
if not (ex.errno == errno.EEXIST and os.path.isdir(b_curpath)):
raise
tmp_file_args = file_args.copy()
tmp_file_args['path'] = curpath
changed = module.set_fs_attributes_if_different(tmp_file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
except Exception as e:
raise AnsibleModuleError(results={'msg': 'There was an issue creating %s as requested:'
' %s' % (curpath, to_native(e)),
'path': path})
return {'path': path, 'changed': changed, 'diff': diff}
elif prev_state != 'directory':
# We already know prev_state is not 'absent', therefore it exists in some form.
raise AnsibleModuleError(results={'msg': '%s already exists as a %s' % (path, prev_state),
'path': path})
#
# previous state == directory
#
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
if recurse:
changed |= recursive_set_attributes(b_path, follow, file_args, mtime, atime)
return {'path': path, 'changed': changed, 'diff': diff}
def ensure_symlink(path, src, follow, force, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
prev_state = get_state(b_path)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
# src is either the source of a symlink, or an informational pass-through of src from the template
# or copy module; even if this module never uses it, it is needed to key off some things
if src is None:
if follow:
# use the current target of the link as the source
src = to_native(os.path.realpath(b_path), errors='strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
if not os.path.islink(b_path) and os.path.isdir(b_path):
relpath = path
else:
b_relpath = os.path.dirname(b_path)
relpath = to_native(b_relpath, errors='strict')
absrc = os.path.join(relpath, src)
b_absrc = to_bytes(absrc, errors='surrogate_or_strict')
if not force and not os.path.exists(b_absrc):
raise AnsibleModuleError(results={'msg': 'src file does not exist, use "force=yes" if you'
' really want to create the link: %s' % absrc,
'path': path, 'src': src})
if prev_state == 'directory':
if not force:
raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s'
% (prev_state, path),
'path': path})
elif os.listdir(b_path):
# refuse to replace a directory that has files in it
raise AnsibleModuleError(results={'msg': 'the directory %s is not empty, refusing to'
' convert it' % path,
'path': path})
elif prev_state in ('file', 'hard') and not force:
raise AnsibleModuleError(results={'msg': 'refusing to convert from %s to symlink for %s'
% (prev_state, path),
'path': path})
diff = initial_diff(path, 'link', prev_state)
changed = False
if prev_state in ('hard', 'file', 'directory', 'absent'):
changed = True
elif prev_state == 'link':
b_old_src = os.readlink(b_path)
if b_old_src != b_src:
diff['before']['src'] = to_native(b_old_src, errors='strict')
diff['after']['src'] = src
changed = True
else:
raise AnsibleModuleError(results={'msg': 'unexpected position reached', 'dest': path, 'src': src})
if changed and not module.check_mode:
if prev_state != 'absent':
# try to replace atomically
b_tmppath = to_bytes(os.path.sep).join(
[os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
)
try:
if prev_state == 'directory':
os.rmdir(b_path)
os.symlink(b_src, b_tmppath)
os.rename(b_tmppath, b_path)
except OSError as e:
if os.path.exists(b_tmppath):
os.unlink(b_tmppath)
raise AnsibleModuleError(results={'msg': 'Error while replacing: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
else:
try:
os.symlink(b_src, b_path)
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while linking: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
if module.check_mode and not os.path.exists(b_path):
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
# Now that we might have created the symlink, get the arguments.
# We need to do it now so we can properly follow the symlink if needed
# because load_file_common_arguments sets 'path' according
# the value of follow and the symlink existence.
file_args = module.load_file_common_arguments(module.params)
# Whenever we create a link to a nonexistent target we know that the nonexistent target
# cannot have any permissions set on it. Skip setting those and emit a warning (the user
# can set follow=False to remove the warning)
if follow and os.path.islink(b_path) and not os.path.exists(file_args['path']):
module.warn('Cannot set fs attributes on a non-existent symlink target. follow should be'
' set to False to avoid this.')
else:
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
def ensure_hardlink(path, src, follow, force, timestamps):
b_path = to_bytes(path, errors='surrogate_or_strict')
b_src = to_bytes(src, errors='surrogate_or_strict')
prev_state = get_state(b_path)
file_args = module.load_file_common_arguments(module.params)
mtime = get_timestamp_for_time(timestamps['modification_time'], timestamps['modification_time_format'])
atime = get_timestamp_for_time(timestamps['access_time'], timestamps['access_time_format'])
# src is the source of a hardlink. We require it if we are creating a new hardlink.
# We require path in the argument_spec so we know it is present at this point.
if src is None:
raise AnsibleModuleError(results={'msg': 'src is required for creating new hardlinks'})
if not os.path.exists(b_src):
raise AnsibleModuleError(results={'msg': 'src does not exist', 'dest': path, 'src': src})
diff = initial_diff(path, 'hard', prev_state)
changed = False
if prev_state == 'absent':
changed = True
elif prev_state == 'link':
b_old_src = os.readlink(b_path)
if b_old_src != b_src:
diff['before']['src'] = to_native(b_old_src, errors='strict')
diff['after']['src'] = src
changed = True
elif prev_state == 'hard':
if not os.stat(b_path).st_ino == os.stat(b_src).st_ino:
changed = True
if not force:
raise AnsibleModuleError(results={'msg': 'Cannot link, different hard link exists at destination',
'dest': path, 'src': src})
elif prev_state == 'file':
changed = True
if not force:
raise AnsibleModuleError(results={'msg': 'Cannot link, %s exists at destination' % prev_state,
'dest': path, 'src': src})
elif prev_state == 'directory':
changed = True
if os.path.exists(b_path):
if os.stat(b_path).st_ino == os.stat(b_src).st_ino:
return {'path': path, 'changed': False}
elif not force:
raise AnsibleModuleError(results={'msg': 'Cannot link: different hard link exists at destination',
'dest': path, 'src': src})
else:
raise AnsibleModuleError(results={'msg': 'unexpected position reached', 'dest': path, 'src': src})
if changed and not module.check_mode:
if prev_state != 'absent':
# try to replace atomically
b_tmppath = to_bytes(os.path.sep).join(
[os.path.dirname(b_path), to_bytes(".%s.%s.tmp" % (os.getpid(), time.time()))]
)
try:
if prev_state == 'directory':
if os.path.exists(b_path):
try:
os.unlink(b_path)
except OSError as e:
if e.errno != errno.ENOENT: # It may already have been removed
raise
os.link(b_src, b_tmppath)
os.rename(b_tmppath, b_path)
except OSError as e:
if os.path.exists(b_tmppath):
os.unlink(b_tmppath)
raise AnsibleModuleError(results={'msg': 'Error while replacing: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
else:
try:
os.link(b_src, b_path)
except OSError as e:
raise AnsibleModuleError(results={'msg': 'Error while linking: %s'
% to_native(e, nonstring='simplerepr'),
'path': path})
if module.check_mode and not os.path.exists(b_path):
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
changed = module.set_fs_attributes_if_different(file_args, changed, diff, expand=False)
changed |= update_timestamp_for_file(file_args['path'], mtime, atime, diff)
return {'dest': path, 'src': src, 'changed': changed, 'diff': diff}
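# The hardlink identity test used above, reduced to a sketch: two paths name
# the same hard link exactly when they share an inode (like the code above,
# this ignores st_dev, which a fully general check would also compare):
#
#   def same_hardlink(b_a, b_b):
#       return os.stat(b_a).st_ino == os.stat(b_b).st_ino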
def main():
global module
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', choices=['absent', 'directory', 'file', 'hard', 'link', 'touch']),
path=dict(type='path', required=True, aliases=['dest', 'name']),
_original_basename=dict(type='str'), # Internal use only, for recursive ops
recurse=dict(type='bool', default=False),
force=dict(type='bool', default=False), # Note: Should not be in file_common_args in future
follow=dict(type='bool', default=True), # Note: Different default than file_common_args
_diff_peek=dict(type='bool'), # Internal use only, for internal checks in the action plugins
src=dict(type='path'), # Note: Should not be in file_common_args in future
modification_time=dict(type='str'),
modification_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
access_time=dict(type='str'),
access_time_format=dict(type='str', default='%Y%m%d%H%M.%S'),
),
add_file_common_args=True,
supports_check_mode=True,
)
# When we rewrite basic.py, we will do something similar to this on instantiating an AnsibleModule
sys.excepthook = _ansible_excepthook
additional_parameter_handling(module.params)
params = module.params
state = params['state']
recurse = params['recurse']
force = params['force']
follow = params['follow']
path = params['path']
src = params['src']
timestamps = {}
timestamps['modification_time'] = keep_backward_compatibility_on_timestamps(params['modification_time'], state)
timestamps['modification_time_format'] = params['modification_time_format']
timestamps['access_time'] = keep_backward_compatibility_on_timestamps(params['access_time'], state)
timestamps['access_time_format'] = params['access_time_format']
# short-circuit for diff_peek
if params['_diff_peek'] is not None:
appears_binary = execute_diff_peek(to_bytes(path, errors='surrogate_or_strict'))
module.exit_json(path=path, changed=False, appears_binary=appears_binary)
if state == 'file':
result = ensure_file_attributes(path, follow, timestamps)
elif state == 'directory':
result = ensure_directory(path, follow, recurse, timestamps)
elif state == 'link':
result = ensure_symlink(path, src, follow, force, timestamps)
elif state == 'hard':
result = ensure_hardlink(path, src, follow, force, timestamps)
elif state == 'touch':
result = execute_touch(path, follow, timestamps)
elif state == 'absent':
result = ensure_absent(path)
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | params['path'] = os.path.join(params['path'], basename)
# state should default to file, but since that creates many conflicts,
# default state to 'current' when it exists. |
<|file_name|>x-point.ts<|end_file_name|><|fim▁begin|>import { Point } from "./point";
export class XPoint extends Point {
constructor(wording: string) {
super(wording);
this.text.anchor.x = 0.5;<|fim▁hole|> }
public draw(): void {
this.clear();
this.lineStyle(1, 0x0000FF, 1);
this.moveTo(0, 0);
this.lineTo(0, 100);
this.endFill();
}
}<|fim▁end|> | |
<|file_name|>2_6_lists_functions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# reference resource: http://www.tutorialspoint.com/python/python_lists.htm
lista_de_asignaturas_teoricas = ['matematicas', 'lengua castellana']
# the len() function is used to get the number of elements in a list
print "number of elements in the list lista_de_asignaturas_teoricas"
print len(lista_de_asignaturas_teoricas)
lista_de_numeros = [1, 2, 3, 4, 2, 3, 7, 8]
# find the largest element in a list of numbers
print "largest element"
print max(lista_de_numeros)
# 8
<|fim▁hole|><|fim▁end|> | # hallar elemento de menor valor en una lista con numeros
print "elemento de menor valor"
print min(lista_de_numeros)
# 1 |
<|file_name|>GitHasVersionRule.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013-2014 Will Thames <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
<|fim▁hole|>class GitHasVersionRule(AnsibleLintRule):
id = '401'
shortdesc = 'Git checkouts must contain explicit version'
description = (
'All version control checkouts must point to '
'an explicit commit or tag, not just ``latest``'
)
severity = 'MEDIUM'
tags = ['module', 'repeatability', 'ANSIBLE0004']
version_added = 'historic'
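    # Illustrative snippets (hypothetical playbook content, included only to
    # document the matchtask logic below):
    #
    #   - git: repo=https://example.com/repo.git dest=/srv/repo
    #     # flagged: version defaults to HEAD
    #
    #   - git: repo=https://example.com/repo.git dest=/srv/repo version=1.2.3
    #     # passes: an explicit version is pinned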
def matchtask(self, file, task):
return (task['action']['__ansible_module__'] == 'git' and
task['action'].get('version', 'HEAD') == 'HEAD')<|fim▁end|> | from ansiblelint.rules import AnsibleLintRule
|
<|file_name|>board.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------#
# #
# This file is part of the Horus Project #
# #
# Copyright (C) 2014-2015 Mundo Reader S.L. #
# #<|fim▁hole|># it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#-----------------------------------------------------------------------#
__author__ = "Jesús Arroyo Torrens <[email protected]>"
__license__ = "GNU General Public License v2 http://www.gnu.org/licenses/gpl.html"
import time
import serial
import threading
class Error(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class WrongFirmware(Error):
def __init__(self, msg="WrongFirmware"):
super(Error, self).__init__(msg)
class BoardNotConnected(Error):
def __init__(self, msg="BoardNotConnected"):
super(Error, self).__init__(msg)
class Board:
"""Board class. For accessing to the scanner board"""
"""
Gcode commands:
G1 Fnnn : feed rate
G1 Xnnn : move motor
M70 Tn : switch off laser n
M71 Tn : switch on laser n
"""
def __init__(self, parent=None, serialName='/dev/ttyUSB0', baudRate=115200):
self.parent = parent
self.serialName = serialName
self.baudRate = baudRate
self.serialPort = None
self.isConnected = False
self.unplugCallback = None
self._position = 0
self._direction = 1
self._n = 0 # Check if command fails
def setSerialName(self, serialName):
self.serialName = serialName
def setBaudRate(self, baudRate):
self.baudRate = baudRate
def setInvertMotor(self, invertMotor):
if invertMotor:
self._direction = -1
else:
self._direction = +1
def setUnplugCallback(self, unplugCallback=None):
self.unplugCallback = unplugCallback
def connect(self):
""" Opens serial port and performs handshake"""
print ">>> Connecting board {0} {1}".format(self.serialName, self.baudRate)
self.isConnected = False
try:
self.serialPort = serial.Serial(self.serialName, self.baudRate, timeout=2)
if self.serialPort.isOpen():
#-- Force Reset and flush
self._reset()
version = self.serialPort.readline()
if version == "Horus 0.1 ['$' for help]\r\n":
self.setSpeedMotor(1)
self.setAbsolutePosition(0)
self.serialPort.timeout = 0.05
print ">>> Done"
self.isConnected = True
else:
raise WrongFirmware()
else:
raise BoardNotConnected()
except:
print "Error opening the port {0}\n".format(self.serialName)
self.serialPort = None
raise BoardNotConnected()
def disconnect(self):
""" Closes serial port """
if self.isConnected:
print ">>> Disconnecting board {0}".format(self.serialName)
try:
if self.serialPort is not None:
self.setLeftLaserOff()
self.setRightLaserOff()
self.disableMotor()
self.serialPort.close()
del self.serialPort
except serial.SerialException:
print "Error closing the port {0}\n".format(self.serialName)
print ">>> Error"
self.isConnected = False
print ">>> Done"
def enableMotor(self):
return self._sendCommand("M17")
def disableMotor(self):
return self._sendCommand("M18")
def setSpeedMotor(self, feedRate):
self.feedRate = feedRate
return self._sendCommand("G1F{0}".format(self.feedRate))
def setAccelerationMotor(self, acceleration):
self.acceleration = acceleration
return self._sendCommand("$120={0}".format(self.acceleration))
def setRelativePosition(self, pos):
self._posIncrement = pos
def setAbsolutePosition(self, pos):
self._posIncrement = 0
self._position = pos
def moveMotor(self, nonblocking=False, callback=None):
self._position += self._posIncrement * self._direction
return self._sendCommand("G1X{0}".format(self._position), nonblocking, callback)
def setRightLaserOn(self):
return self._sendCommand("M71T2")
def setLeftLaserOn(self):
return self._sendCommand("M71T1")
def setRightLaserOff(self):
return self._sendCommand("M70T2")
def setLeftLaserOff(self):
return self._sendCommand("M70T1")
def getLDRSensor(self, pin):
value = self.sendRequest("M50T"+pin, readLines=True).split("\n")[0]
try:
return int(value)
except ValueError:
return 0
def sendRequest(self, req, nonblocking=False, callback=None, readLines=False):
if nonblocking:
threading.Thread(target=self._sendRequest, args=(req, callback, readLines)).start()
else:
return self._sendRequest(req, callback, readLines)
def _sendRequest(self, req, callback=None, readLines=False):
"""Sends the request and returns the response"""
ret = ''
if self.isConnected and req != '':
if self.serialPort is not None and self.serialPort.isOpen():
try:
self.serialPort.flushInput()
self.serialPort.flushOutput()
self.serialPort.write(req+"\r\n")
while ret == '': # TODO: add timeout
if readLines:
ret = ''.join(self.serialPort.readlines())
else:
ret = ''.join(self.serialPort.readline())
time.sleep(0.01)
self._success()
except:
if callback is not None:
callback(ret)
self._fail()
else:
self._fail()
if callback is not None:
callback(ret)
return ret
def _success(self):
self._n = 0
def _fail(self):
self._n += 1
if self._n >= 1:
self._n = 0
if self.unplugCallback is not None and \
self.parent is not None and not self.parent.unplugged:
self.parent.unplugged = True
self.unplugCallback()
def _checkAcknowledge(self, ack):
if ack is not None:
return ack.endswith("ok\r\n")
else:
return False
def _sendCommand(self, cmd, nonblocking=False, callback=None):
if nonblocking:
self.sendRequest(cmd, nonblocking, callback)
else:
return self._checkAcknowledge(self._sendRequest(cmd))
def _reset(self):
self.serialPort.flushInput()
self.serialPort.flushOutput()
self.serialPort.write("\x18\r\n") # Ctrl-x
self.serialPort.readline()<|fim▁end|> | # Date: August, November 2014 #
# Author: Jesús Arroyo Torrens <[email protected]> #
# #
# This program is free software: you can redistribute it and/or modify # |
<|file_name|>partial.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow;
use std::collections::HashMap;
use serde_json::value::Value as Json;
use crate::block::BlockContext;
use crate::context::{merge_json, Context};
use crate::error::RenderError;
use crate::json::path::Path;
use crate::output::Output;
use crate::registry::Registry;
use crate::render::{Decorator, Evaluable, RenderContext, Renderable};
use crate::template::Template;
pub(crate) const PARTIAL_BLOCK: &str = "@partial-block";
fn find_partial<'reg: 'rc, 'rc: 'a, 'a>(
rc: &'a RenderContext<'reg, 'rc>,
r: &'reg Registry<'reg>,
d: &Decorator<'reg, 'rc>,
name: &str,
) -> Result<Option<Cow<'a, Template>>, RenderError> {
if let Some(partial) = rc.get_partial(name) {
return Ok(Some(Cow::Borrowed(partial)));
}
if let Some(tpl) = r.get_or_load_template_optional(name) {
return tpl.map(Option::Some);
}
if let Some(tpl) = d.template() {
return Ok(Some(Cow::Borrowed(tpl)));
}
Ok(None)
}
pub fn expand_partial<'reg: 'rc, 'rc>(
d: &Decorator<'reg, 'rc>,
r: &'reg Registry<'reg>,
ctx: &'rc Context,
rc: &mut RenderContext<'reg, 'rc>,
out: &mut dyn Output,
) -> Result<(), RenderError> {
// try eval inline partials first
if let Some(t) = d.template() {
t.eval(r, ctx, rc)?;
}
let tname = d.name();
if rc.is_current_template(tname) {
return Err(RenderError::new("Cannot include self in >"));
}
let partial = find_partial(rc, r, d, tname)?;
if let Some(t) = partial {
// clone to avoid lifetime issue
// FIXME: refactor this to avoid the clone
let mut local_rc = rc.clone();
// if tname == PARTIAL_BLOCK
let is_partial_block = tname == PARTIAL_BLOCK;
// add to the partial block depth when there are consecutive
// partial blocks in the stack.
if is_partial_block {
local_rc.inc_partial_block_depth();
} else {
// depth cannot be lower than 0, which is guaranteed in the
// `dec_partial_block_depth` method
local_rc.dec_partial_block_depth();
}
let mut block_created = false;
if let Some(base_path) = d.param(0).and_then(|p| p.context_path()) {
// path given, update base_path
let mut block = BlockContext::new();
*block.base_path_mut() = base_path.to_vec();
block_created = true;
// clear blocks to prevent block params from parent
// template to be leaked into partials
local_rc.clear_blocks();
local_rc.push_block(block);
} else if !d.hash().is_empty() {
let mut block = BlockContext::new();
// hash given, update base_value
let hash_ctx = d
.hash()
.iter()
.map(|(k, v)| (*k, v.value()))
.collect::<HashMap<&str, &Json>>();
let merged_context = merge_json(
local_rc.evaluate2(ctx, &Path::current())?.as_json(),
&hash_ctx,
);
block.set_base_value(merged_context);
block_created = true;
// clear blocks to prevent block params from parent
// template to be leaked into partials
// see `test_partial_context_issue_495` for the case.
local_rc.clear_blocks();
local_rc.push_block(block);
}
// @partial-block
if let Some(pb) = d.template() {
local_rc.push_partial_block(pb);
}
let result = t.render(r, ctx, &mut local_rc, out);
// cleanup
if block_created {
local_rc.pop_block();
}
if d.template().is_some() {
local_rc.pop_partial_block();
}
result
} else {
Ok(())
}
}
#[cfg(test)]
mod test {
use crate::context::Context;
use crate::error::RenderError;
use crate::output::Output;
use crate::registry::Registry;
use crate::render::{Helper, RenderContext};
#[test]
fn test() {
let mut handlebars = Registry::new();
assert!(handlebars
.register_template_string("t0", "{{> t1}}")
.is_ok());
assert!(handlebars
.register_template_string("t1", "{{this}}")
.is_ok());
assert!(handlebars
.register_template_string("t2", "{{#> t99}}not there{{/t99}}")
.is_ok());
assert!(handlebars
.register_template_string("t3", "{{#*inline \"t31\"}}{{this}}{{/inline}}{{> t31}}")
.is_ok());
assert!(handlebars
.register_template_string(
"t4",
"{{#> t5}}{{#*inline \"nav\"}}navbar{{/inline}}{{/t5}}"
)
.is_ok());
assert!(handlebars
.register_template_string("t5", "include {{> nav}}")
.is_ok());
assert!(handlebars
.register_template_string("t6", "{{> t1 a}}")
.is_ok());
assert!(handlebars
.register_template_string(
"t7",
"{{#*inline \"t71\"}}{{a}}{{/inline}}{{> t71 a=\"world\"}}"
)
.is_ok());
assert!(handlebars.register_template_string("t8", "{{a}}").is_ok());
assert!(handlebars
.register_template_string("t9", "{{> t8 a=2}}")
.is_ok());
assert_eq!(handlebars.render("t0", &1).ok().unwrap(), "1".to_string());
assert_eq!(
handlebars.render("t2", &1).ok().unwrap(),
"not there".to_string()
);
assert_eq!(handlebars.render("t3", &1).ok().unwrap(), "1".to_string());
assert_eq!(
handlebars.render("t4", &1).ok().unwrap(),
"include navbar".to_string()
);
assert_eq!(
handlebars
.render("t6", &btreemap! {"a".to_string() => "2".to_string()})
.ok()
.unwrap(),
"2".to_string()
);
assert_eq!(
handlebars.render("t7", &1).ok().unwrap(),
"world".to_string()
);
assert_eq!(handlebars.render("t9", &1).ok().unwrap(), "2".to_string());
}
#[test]
fn test_include_partial_block() {
let t0 = "hello {{> @partial-block}}";
let t1 = "{{#> t0}}inner {{this}}{{/t0}}";
let mut handlebars = Registry::new();
assert!(handlebars.register_template_string("t0", t0).is_ok());
assert!(handlebars.register_template_string("t1", t1).is_ok());
let r0 = handlebars.render("t1", &true);
assert_eq!(r0.ok().unwrap(), "hello inner true".to_string());
}
#[test]
fn test_self_inclusion() {
let t0 = "hello {{> t1}} {{> t0}}";
let t1 = "some template";
let mut handlebars = Registry::new();
assert!(handlebars.register_template_string("t0", t0).is_ok());
assert!(handlebars.register_template_string("t1", t1).is_ok());
let r0 = handlebars.render("t0", &true);
assert!(r0.is_err());
}
#[test]
fn test_issue_143() {
let main_template = "one{{> two }}three{{> two }}";
let two_partial = "--- two ---";
let mut handlebars = Registry::new();
assert!(handlebars
.register_template_string("template", main_template)
.is_ok());
assert!(handlebars
.register_template_string("two", two_partial)
.is_ok());
let r0 = handlebars.render("template", &true);
assert_eq!(r0.ok().unwrap(), "one--- two ---three--- two ---");
}
#[test]
fn test_hash_context_outscope() {
let main_template = "In: {{> p a=2}} Out: {{a}}";
let p_partial = "{{a}}";
let mut handlebars = Registry::new();
assert!(handlebars
.register_template_string("template", main_template)
.is_ok());
assert!(handlebars.register_template_string("p", p_partial).is_ok());
let r0 = handlebars.render("template", &true);
assert_eq!(r0.ok().unwrap(), "In: 2 Out: ");
}
#[test]
fn test_partial_context_hash() {
let mut hbs = Registry::new();
hbs.register_template_string("one", "This is a test. {{> two name=\"fred\" }}")
.unwrap();
hbs.register_template_string("two", "Lets test {{name}}")
.unwrap();
assert_eq!(
"This is a test. Lets test fred",
hbs.render("one", &0).unwrap()
);
}<|fim▁hole|>
#[test]
fn test_partial_subexpression_context_hash() {
let mut hbs = Registry::new();
hbs.register_template_string("one", "This is a test. {{> (x @root) name=\"fred\" }}")
.unwrap();
hbs.register_template_string("two", "Lets test {{name}}")
.unwrap();
hbs.register_helper(
"x",
Box::new(
|_: &Helper<'_, '_>,
_: &Registry<'_>,
_: &Context,
_: &mut RenderContext<'_, '_>,
out: &mut dyn Output|
-> Result<(), RenderError> {
out.write("two")?;
Ok(())
},
),
);
assert_eq!(
"This is a test. Lets test fred",
hbs.render("one", &0).unwrap()
);
}
#[test]
fn test_nested_partial_scope() {
let t = "{{#*inline \"pp\"}}{{a}} {{b}}{{/inline}}{{#each c}}{{> pp a=2}}{{/each}}";
let data = json!({"c": [{"b": true}, {"b": false}]});
let mut handlebars = Registry::new();
assert!(handlebars.register_template_string("t", t).is_ok());
let r0 = handlebars.render("t", &data);
assert_eq!(r0.ok().unwrap(), "2 true2 false");
}
#[test]
fn test_nested_partial_block() {
let mut handlebars = Registry::new();
let template1 = "<outer>{{> @partial-block }}</outer>";
let template2 = "{{#> t1 }}<inner>{{> @partial-block }}</inner>{{/ t1 }}";
let template3 = "{{#> t2 }}Hello{{/ t2 }}";
handlebars
.register_template_string("t1", &template1)
.unwrap();
handlebars
.register_template_string("t2", &template2)
.unwrap();
let page = handlebars.render_template(&template3, &json!({})).unwrap();
assert_eq!("<outer><inner>Hello</inner></outer>", page);
}
#[test]
fn test_up_to_partial_level() {
let outer = r#"{{>inner name="fruit:" vegetables=fruits}}"#;
let inner = "{{#each vegetables}}{{../name}} {{this}},{{/each}}";
let data = json!({ "fruits": ["carrot", "tomato"] });
let mut handlebars = Registry::new();
handlebars.register_template_string("outer", outer).unwrap();
handlebars.register_template_string("inner", inner).unwrap();
assert_eq!(
handlebars.render("outer", &data).unwrap(),
"fruit: carrot,fruit: tomato,"
);
}
#[test]
fn line_stripping_with_inline_and_partial() {
let tpl0 = r#"{{#*inline "foo"}}foo
{{/inline}}
{{> foo}}
{{> foo}}
{{> foo}}"#;
let tpl1 = r#"{{#*inline "foo"}}foo{{/inline}}
{{> foo}}
{{> foo}}
{{> foo}}"#;
let hbs = Registry::new();
assert_eq!(
r#"foo
foo
foo
"#,
hbs.render_template(tpl0, &json!({})).unwrap()
);
assert_eq!(
r#"
foofoofoo"#,
hbs.render_template(tpl1, &json!({})).unwrap()
);
}
#[test]
fn test_partial_indent() {
let outer = r#" {{> inner inner_solo}}
{{#each inners}}
{{> inner}}
{{/each}}
{{#each inners}}
{{> inner}}
{{/each}}
"#;
let inner = r#"name: {{name}}
"#;
let mut hbs = Registry::new();
hbs.register_template_string("inner", inner).unwrap();
hbs.register_template_string("outer", outer).unwrap();
let result = hbs
.render(
"outer",
&json!({
"inner_solo": {"name": "inner_solo"},
"inners": [
{"name": "hello"},
{"name": "there"}
]
}),
)
.unwrap();
assert_eq!(
result,
r#" name: inner_solo
name: hello
name: there
name: hello
name: there
"#
);
}
// Rule::partial_expression should not trim leading indent by default
#[test]
fn test_partial_prevent_indent() {
let outer = r#" {{> inner inner_solo}}
{{#each inners}}
{{> inner}}
{{/each}}
{{#each inners}}
{{> inner}}
{{/each}}
"#;
let inner = r#"name: {{name}}
"#;
let mut hbs = Registry::new();
hbs.set_prevent_indent(true);
hbs.register_template_string("inner", inner).unwrap();
hbs.register_template_string("outer", outer).unwrap();
let result = hbs
.render(
"outer",
&json!({
"inner_solo": {"name": "inner_solo"},
"inners": [
{"name": "hello"},
{"name": "there"}
]
}),
)
.unwrap();
assert_eq!(
result,
r#"name: inner_solo
name: hello
name: there
name: hello
name: there
"#
);
}
#[test]
fn test_nested_partials() {
let mut hb = Registry::new();
hb.register_template_string("partial", "{{> @partial-block}}")
.unwrap();
hb.register_template_string(
"index",
r#"{{#>partial}}
Yo
{{#>partial}}
Yo 2
{{/partial}}
{{/partial}}"#,
)
.unwrap();
assert_eq!(
r#" Yo
Yo 2
"#,
hb.render("index", &()).unwrap()
);
hb.register_template_string("partial2", "{{> @partial-block}}")
.unwrap();
let r2 = hb
.render_template(
r#"{{#> partial}}
{{#> partial2}}
:(
{{/partial2}}
{{/partial}}"#,
&(),
)
.unwrap();
assert_eq!(":(\n", r2);
}
#[test]
fn test_partial_context_issue_495() {
let mut hb = Registry::new();
hb.register_template_string(
"t1",
r#"{{~#*inline "displayName"~}}
Template:{{name}}
{{/inline}}
{{#each data as |name|}}
Name:{{name}}
{{>displayName name="aaaa"}}
{{/each}}"#,
)
.unwrap();
hb.register_template_string(
"t1",
r#"{{~#*inline "displayName"~}}
Template:{{this}}
{{/inline}}
{{#each data as |name|}}
Name:{{name}}
{{>displayName}}
{{/each}}"#,
)
.unwrap();
let data = json!({
"data": ["hudel", "test"]
});
assert_eq!(
r#"Name:hudel
Template:hudel
Name:test
Template:test
"#,
hb.render("t1", &data).unwrap()
);
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (C) 2009-2010 Nicolas P. Rougier
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
'''
The atb module provides bindings for AntTweakBar which is a small and
easy-to-use C/C++ library that allows programmers to quickly add a light and
intuitive graphical user interface into graphic applications based on OpenGL,
DirectX 9 or DirectX 10 to interactively tweak their parameters on-screen.
'''
import ctypes
from raw import *
def check_error(status, error=0):
if status == error:
raise Exception(TwGetLastError())
else:
return status
def enum(name, values):
E = (TwEnumVal*len(values))()
for i,(label,value) in enumerate(values.iteritems()):
E[i].Value, E[i].Label = value, label
return check_error(TwDefineEnum(name, E, len(values)))
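# Example (hypothetical labels/values): build an AntTweakBar enum type that
# can then be passed as vtype to Bar.add_var below:
#
#   season_type = enum("Season", {'Spring': 0, 'Summer': 1,
#                                 'Autumn': 2, 'Winter': 3})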
def init():
check_error(TwInit(TW_OPENGL, 0))
def _dict_to_defs(args):
'''
Converts a dictionary like {'a': 'b', 'n': 2} to the string "a='b' n='2'"
suitable for passing to the define method. Automatic type conversion is
done as follows:
- if the value is bool result is simply the name of the string eg
{'closed':True} -> "closed"
- if the value is a tuple the items are converted to strings and joined
by spaces, eg {'size':(10, 20)} -> "size='10 20'"
'''
r = []
for k, v in args.iteritems():
if type(v) is bool: v = ""
elif type(v) is tuple: v = "='%s'" % " ".join((str(i) for i in v))
else: v = "='%s'" % str(v)
r.append(k+v)
return " ".join(r)
def draw():
check_error(TwDraw())
def terminate():
check_error(TwTerminate())
class Bar(object):
'''
Bar is an internal structure used to store tweak bar attributes and
states.
'''
def __init__(self, name=None, **defs):
'''
Create a new bar.
Arguments:
----------
name : str
Name of the new bar.
Keyword arguments:
------------------
label : str
Changes the label of a bar, that is the title displayed on top of a
bar. By default, the label is the name used when the bar was
created.
help : str
Defines the help message associated to a bar. This message will be
displayed inside the Help bar automatically created to help the
user.
You can also define a global help message. It will be displayed at
the beginning of the Help bar. To define it, use the GLOBAL keyword
instead of the bar name.
color : (int,int,int)
Changes the color of a bar to (red,green,blue).
red, green and blue are integer values between 0 and 255 that
define the red, green and blue color channels. See also the alpha
and text parameters to change bar visual aspect.
alpha : int
Changes the bar opacity.
Bar opacity can vary from 0 for fully transparent to 255 for fully
opaque. See also the color and text parameters to change bar visual
aspect.
text : str
Changes text color to 'dark' or 'light'.
Depending on your application background color and on bar color and
alpha, bar text might be more readable if it is dark or light. This
parameter allows to switch between the two modes. See also the
color and alpha parameters to change bar visual aspect.
position : (int,int)
Move a bar to a new position (x,y).
x and y are positive integer values that represent the new position
of the bar in pixels. (x=0, y=0) is upper-left corner of the
application window.
size : (int,int)
Change the bar size to (sx,sy).
sx and sy are positive integer values that represent the new size
of the bar in pixels.
valueswidth : int
Change the width 'w' of the bar right column used to display numerical
values.
w is a positive integer that represents width in pixels.
refresh : float
Change the refresh rate 'r' of the bar.
Values displayed by a bar are automatically updated to reflect
changes of their associated variables. r is a real value
corresponding to the number of seconds between two updates.
fontsize : int
Change the size 's' of the font used by the bars.
s is 1 for small font, 2 for medium font, or 3 for large font. Note
that all bars share the same font, so this change is applied to all
bars.
visible : bool
Show or hide a tweak bar.
iconified : bool
Iconify or deiconify a tweak bar.
iconpos : str
Changes the place where icons of iconified bars are displayed.
p is one of the following values:
- 'bottomleft' or 'bl' for bottom-left corner of the window (default).
- 'bottomright' or 'br' for bottom-right corner of the window.
- 'topleft' or 'tl' for top-left corner of the window.
- 'topright' or 'tr' for top-right corner of the window.
Note that this parameter is applied to all bar icons.
iconalign : str
Changes the alignment of icons of iconified bars. It can be
'vertical' (the default), or 'horizontal'.
Note that this parameter is applied to all bar icons.
iconmargin : (int,int)
Add a margin (x,y) between borders of the window and icons of
iconified bars. x and y are the number of pixels between window
borders and icons in the x and y directions respectively.
Note that this parameter is applied to all bar icons.
iconifiable : bool
Allow a bar to be iconified or not by the user.
movable : bool
Allow a bar to be moved or not by the user.
resizable : bool
Allow a bar to be resized or not by the user.
fontresizable : bool
Allow bar fonts to be resized or not by the user.
Note that this parameter is applied to all bars.
alwaystop : bool
Set a bar to be always on top of the others.
alwaysbottom : bool
Set a bar to be always behind the others.
'''
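# A hypothetical construction call matching the keyword handling below
# (each keyword is turned into one "name=value" token by _dict_to_defs):
#
#   bar = Bar('Settings', label='My settings', color=(50, 50, 50),
#             alpha=128, position=(10, 10), size=(200, 320))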
if not name:
name = "Unnamed"
self._name = name
self._bar = TwNewBar(name)
if defs:
self.define(_dict_to_defs(defs))
self._c_callbacks = []
def _get_name(self):
return self._name
name = property(_get_name,
doc='''Name of the bar''')
def _set_label(self, label):
c = ctypes.c_char_p(label)
TwSetParam(self._bar, "", "label", PARAM_CSTRING, 1, c)
def _get_label(self):
c = ctypes.create_string_buffer(4096)
TwGetParam(self._bar, "", "label", PARAM_CSTRING, 4095, c)
return c.value
label = property(_get_label, _set_label,
doc='''Bar label.
Changes the label of a bar, that is the title displayed on top of a bar.
By default, the label is the name used when the bar was created.
:type: str
''')
def _set_alpha(self, alpha):
c = ctypes.c_int(alpha)
TwSetParam(self._bar, "", "alpha", PARAM_INT32, 1, ctypes.byref(c))
def _get_alpha(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "alpha", PARAM_INT32, 1, ctypes.byref(c))
return c.value
alpha = property(_get_alpha, _set_alpha,
doc='''Bar opacity.
Bar opacity can vary from 0 for fully transparent to 255 for fully opaque.
See also the color and text parameters to change bar visual aspect.
:type: int
''')
def _set_color(self, color):
c = (ctypes.c_int*3)(color[0],color[1],color[2])
TwSetParam(self._bar, "", "color", PARAM_INT32, 3, ctypes.byref(c))
def _get_color(self):
c = (ctypes.c_int*3)(0,0,0)
TwGetParam(self._bar, "", "color", PARAM_INT32, 3, ctypes.byref(c))
return c[0], c[1], c[2]
color = property(_get_color, _set_color,
doc='''Bar color.
Red, green and blue are integer values between 0 and 255 that define the
red, green and blue color channels. See also the alpha and text parameters
to change bar visual aspect.
:type: (int,int,int)
''')
def _set_help(self, help):
c = ctypes.c_char_p(help)
TwSetParam(self._bar, "", "help", PARAM_CSTRING, 1, c)
def _get_help(self):
c = ctypes.create_string_buffer(4096)
TwGetParam(self._bar, "", "help", PARAM_CSTRING, 4095, c)
return c.value
help = property(_get_help, _set_help,
doc='''Help message.
Defines the help message associated to a bar. This message will be
displayed inside the Help bar automatically created to help the user.
:type: str
''')
def _set_text(self, text):
c = ctypes.c_char_p(text)
TwSetParam(self._bar, "", "text", PARAM_CSTRING, 1, c)
def _get_text(self):
c = ctypes.create_string_buffer(16)
TwGetParam(self._bar, "", "text", PARAM_CSTRING, 15, c)
return c.value
text = property(_get_text, _set_text,
doc='''Text color.
Depending on your application background color and on bar color and alpha,
bar text might be more readable if it is dark or light. This parameter
allows to switch between the two modes. See also the color and alpha
parameters to change bar visual aspect.
:type: str
''')
def _set_position(self, position):
c = (ctypes.c_int*2)(position[0],position[1])
TwSetParam(self._bar, "", "position", PARAM_INT32, 2, ctypes.byref(c))
def _get_position(self):
c = (ctypes.c_int*2)(0,0)
TwGetParam(self._bar, "", "position", PARAM_INT32, 2, ctypes.byref(c))
return c[0], c[1]
position = property(_get_position, _set_position,
doc='''Bar position (x,y).
x and y are positive integer values that represent the new position of the
bar in pixels. (x=0, y=0) is upper-left corner of the application window.
:type: (int,int)
''')
def _set_size(self, size):
c = (ctypes.c_int*2)(size[0],size[1])
TwSetParam(self._bar, "", "size", PARAM_INT32, 2, ctypes.byref(c))
def _get_size(self):
c = (ctypes.c_int*2)(0,0)
TwGetParam(self._bar, "", "size", PARAM_INT32, 2, ctypes.byref(c))
return c[0], c[1]
size = property(_get_size, _set_size,
doc='''Bar size (sx,sy).
sx and sy are positive integer values that represent the new size of the bar
in pixels.
:type: (int,int)
''')
def _set_valuewidth(self, valuewidth):
c = ctypes.c_int(valuewidth)
TwSetParam(self._bar, "", "valuewidth", PARAM_INT32, 1, ctypes.byref(c))
def _get_valuewidth(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "valuewidth", PARAM_INT32, 1, ctypes.byref(c))
return c.value
valuewidth = property(_get_valuewidth, _set_valuewidth,
doc='''Value width.
Width of the bar right column used to display numerical values.
:type: int
''')
def _set_fontsize(self, fontsize):
c = ctypes.c_int(fontsize)
TwSetParam(self._bar, "", "fontsize", PARAM_INT32, 1, ctypes.byref(c))
def _get_fontsize(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "fontsize", PARAM_INT32, 1, ctypes.byref(c))
return c.value
fontsize = property(_get_fontsize, _set_fontsize,
doc='''Font size s.
s is 1 for small font, 2 for medium font, or 3 for large font. Note that
all bars share the same font, so this change is applied to all bars.
fontsize is a global parameter.
:type: int
''')
def _set_refresh(self, refresh):
c = ctypes.c_float(refresh)
TwSetParam(self._bar, "", "refresh", PARAM_FLOAT, 1, ctypes.byref(c))
def _get_refresh(self):
c = ctypes.c_float(0)
TwGetParam(self._bar, "", "refresh", PARAM_FLOAT, 1, ctypes.byref(c))
return c.value
refresh = property(_get_refresh, _set_refresh,
doc='''Refresh rate.
Values displayed by a bar are automatically updated to reflect changes of
their associated variables. r is a real value corresponding to the number
of seconds between two updates.
:type: float
''')
def _set_visible(self, visible):
c = ctypes.c_int(visible)
TwSetParam(self._bar, "", "visible", PARAM_INT32, 1, ctypes.byref(c))
def _get_visible(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "visible", PARAM_INT32, 1, ctypes.byref(c))
return c.value
visible = property(_get_visible, _set_visible,
doc='''Bar visibility.
See also the show and hide methods.
<|fim▁hole|>
def _set_iconified(self, iconified):
c = ctypes.c_int(iconified)
TwSetParam(self._bar, "", "iconified", PARAM_INT32, 1, ctypes.byref(c))
def _get_iconified(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "iconified", PARAM_INT32, 1, ctypes.byref(c))
return c.value
iconified = property(_get_iconified, _set_iconified,
doc='''Bar iconification.
Iconify or deiconify the bar.
:type: int
''')
def _set_iconpos(self, iconpos):
c = ctypes.c_char_p(iconpos)
TwSetParam(self._bar, "", "iconpos", PARAM_CSTRING, 1, c)
def _get_iconpos(self):
c = ctypes.create_string_buffer(32)
TwGetParam(self._bar, "", "iconpos", PARAM_CSTRING, 31, c)
return c.value
iconpos = property(_get_iconpos, _set_iconpos,
doc='''Bar icon position p.
p is one of the following values:
- 'bottomleft' or 'bl' for bottom-left corner of the window (default).
- 'bottomright' or 'br' for bottom-right corner of the window.
- 'topleft' or 'tl' for top-left corner of the window.
- 'topright' or 'tr' for top-right corner of the window.
Note that this parameter is applied to all bar icons.
:type: str
''')
def _set_iconalign(self, iconalign):
c = ctypes.c_char_p(iconalign)
TwSetParam(self._bar, "", "iconalign", PARAM_CSTRING, 1, c)
def _get_iconalign(self):
c = ctypes.create_string_buffer(32)
TwGetParam(self._bar, "", "iconalign", PARAM_CSTRING, 31, c)
return c.value
iconalign = property(_get_iconalign, _set_iconalign,
doc='''Bar icon alignment p.
Changes the alignment of icons of iconified bars. It can be 'vertical' (the
default), or 'horizontal'.
Note that this parameter is applied to all bar icons.
:type: str
''')
def _set_iconmargin(self, iconmargin):
c = (ctypes.c_int*2)(iconmargin[0],iconmargin[1])
TwSetParam(self._bar, "", "iconmargin", PARAM_INT32, 2, ctypes.byref(c))
def _get_iconmargin(self):
c = (ctypes.c_int*2)(0,0)
TwGetParam(self._bar, "", "iconmargin", PARAM_INT32, 2, ctypes.byref(c))
return c[0], c[1]
iconmargin = property(_get_iconmargin, _set_iconmargin,
doc='''Bar icon margin (x,y).
Add a margin between borders of the window and icons of iconified bars. x
and y are the number of pixels between window borders and icons in the x
and y directions respectively.
Note that this parameter is applied to all bar icons.
:type: (int,int)
''')
def _set_iconifiable(self, iconifiable):
c = ctypes.c_int(iconifiable)
TwSetParam(self._bar, "", "iconifiable", PARAM_INT32, 1, ctypes.byref(c))
def _get_iconifiable(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "iconifiable", PARAM_INT32, 1, ctypes.byref(c))
return c.value
iconifiable = property(_get_iconifiable, _set_iconifiable,
doc='''Allow a bar to be iconified or not by the user.
:type: int
''')
def _set_movable(self, movable):
c = ctypes.c_int(movable)
TwSetParam(self._bar, "", "movable", PARAM_INT32, 1, ctypes.byref(c))
def _get_movable(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "movable", PARAM_INT32, 1, ctypes.byref(c))
return c.value
movable = property(_get_movable, _set_movable,
doc='''Allow a bar to be moved or not by the user.
:type: int
''')
def _set_resizable(self, resizable):
c = ctypes.c_int(resizable)
TwSetParam(self._bar, "", "resizable", PARAM_INT32, 1, ctypes.byref(c))
def _get_resizable(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "resizable", PARAM_INT32, 1, ctypes.byref(c))
return c.value
resizable = property(_get_resizable, _set_resizable,
doc='''Allow a bar to be resized or not by the user.
:type: int
''')
def _set_fontresizable(self, fontresizable):
c = ctypes.c_int(fontresizable)
TwSetParam(self._bar, "", "fontresizable", PARAM_INT32, 1, ctypes.byref(c))
def _get_fontresizable(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "fontresizable", PARAM_INT32, 1, ctypes.byref(c))
return c.value
fontresizable = property(_get_fontresizable, _set_fontresizable,
doc='''Allow a bar to have font resized or not by the user.
:type: int
''')
def _set_alwaystop(self, alwaystop):
c = ctypes.c_int(alwaystop)
TwSetParam(self._bar, "", "alwaystop", PARAM_INT32, 1, ctypes.byref(c))
def _get_alwaystop(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "alwaystop", PARAM_INT32, 1, ctypes.byref(c))
return c.value
alwaystop = property(_get_alwaystop, _set_alwaystop,
doc='''Set a bar to be always on top of the others.
:type: int
''')
def _set_alwaybottom(self, alwaybottom):
c = ctypes.c_int(alwaybottom)
TwSetParam(self._bar, "", "alwaybottom", PARAM_INT32, 1, ctypes.byref(c))
def _get_alwaybottom(self):
c = ctypes.c_int(0)
TwGetParam(self._bar, "", "alwaybottom", PARAM_INT32, 1, ctypes.byref(c))
return c.value
alwaybottom = property(_get_alwaybottom, _set_alwaybottom,
doc='''Set a bar to be always behind the others.
:type: int
''')
def clear(self):
check_error(TwRemoveAllVars(self._bar))
def remove(self, name):
check_error(TwRemoveVar(self._bar, name))
def update(self):
check_error(TwRefreshBar(self._bar))
def bring_to_front(self):
check_error(TwSetTopBar(self._bar))
def add_var(self, name, value=None, vtype=None, readonly=False,
getter=None, setter=None, data=None, **defs):
'''
Add a new variable to the tweak bar.
Arguments:
----------
name : str
The name of the variable. It will be displayed in the tweak bar if
no label is specified for this variable. It will also be used to
refer to this variable in other functions, so choose a unique,
simple and short name and avoid special characters like spaces or
punctuation marks.
value : ctypes
Value of the variable
vtype : TYPE_xxx
Type of the variable. It must be one of the TYPE_xxx constants or an enum type.
readonly: bool
Makes a variable read-only or read-write. The user would be able to
modify it or not.
getter : func(data) or func()
The callback function that will be called by AntTweakBar to get the
variable's value.
setter : func(value, data)
The callback function that will be called to change the variable's
value.
data : object
Data to be send to getter/setter functions
Keyword arguments:
------------------
label : str
Changes the label of a variable, that is the name displayed before
its value. By default, the label is the name used when the variable
was added to a bar.
help : str
Defines the help message associated to a variable. This message will
be displayed inside the Help bar automatically created to help the
user.
group : str
Move a variable into a group. This allows you to regroup
variables. If groupname does not exist, it is created and added to
the bar. You can also put groups into groups, and so obtain a
hierarchical organization.
visible: bool
Show or hide a variable.
min / max: scalar
Set maximum and minimum value of a variable. Thus, user cannot
exceed these bounding values when (s)he edit the variable.
step: scalar
Set a step value for a variable. When user interactively edit the
variable, it is incremented or decremented by this value.
precision : scalar
Defines the number of significant digits printed after the period
for floating point variables. This number must be between 0 and 12,
or -1 to disable precision and use the default formating.
If precision is not defined and step is defined, the step number of
significant digits is used for defining the precision.
hexa : bool
For integer variables only.
Print an integer variable as hexadecimal or decimal number.
True / False : str
For boolean variables only.
By default, if a boolean variable is true, it is displayed as 'ON',
and if it is false, as 'OFF'. You can change this message with the
true and false parameters, the new string will replace the previous
message.
opened : bool
For groups only.
Fold or unfold a group displayed in a tweak bar (as when the +/-
button displayed in front of the group is clicked).
'''
groups = name.split('/')
name = groups[-1]
_typemap = {ctypes.c_bool: TW_TYPE_BOOL8,
ctypes.c_int: TW_TYPE_INT32,
ctypes.c_long: TW_TYPE_INT32,
ctypes.c_float: TW_TYPE_FLOAT,
ctypes.c_float * 3: TW_TYPE_COLOR3F,
ctypes.c_float * 4: TW_TYPE_COLOR4F,
ctypes.c_char * 512: TW_TYPE_CSSTRING(512)}
_typemap_inv = dict([(v, k) for k, v in _typemap.iteritems()])
if vtype is None and value is not None:
vtype = _typemap.get(type(value))
elif vtype:
vtype = _typemap.get(vtype, vtype)
elif vtype is None and getter is not None:
t = type(getter())
if t == bool:
vtype = TW_TYPE_BOOL8
elif t == int:
vtype = TW_TYPE_INT16
elif t == long:
vtype = TW_TYPE_INT32
elif t == float:
vtype = TW_TYPE_FLOAT
else:
raise ValueError("Cannot determin value type")
ctype = _typemap_inv.get(vtype, ctypes.c_int)
def_str = _dict_to_defs(defs)
if getter:
def wrapped_getter(p, user_data):
v = ctypes.cast(p, ctypes.POINTER(ctype))
d = ctypes.cast(user_data, ctypes.py_object)
if d.value is not None:
v[0] = getter(d.value)
else:
v[0] = getter()
if setter:
def wrapped_setter(p, user_data):
v = ctypes.cast(p, ctypes.POINTER(ctype))
d = ctypes.cast(user_data, ctypes.py_object)
if d.value is not None:
setter(v[0], d.value)
else:
setter(v[0])
if (getter and readonly) or (getter and not setter):
c_callback = GET_FUNC(wrapped_getter)
self._c_callbacks.append(c_callback)
r = TwAddVarCB(self._bar, name, vtype, None, c_callback,
ctypes.py_object(data), def_str)
elif (getter and setter):
c_setter = SET_FUNC(wrapped_setter)
c_getter = GET_FUNC(wrapped_getter)
self._c_callbacks.extend((c_setter, c_getter))
r = TwAddVarCB(self._bar, name, vtype, c_setter, c_getter,
ctypes.py_object(data), def_str)
elif readonly:
r = TwAddVarRO(self._bar, name, vtype, ctypes.byref(value), def_str)
else:
r = TwAddVarRW(self._bar, name, vtype, ctypes.byref(value), def_str)
check_error(r)
if len(groups) > 1:
name = self.name
for i in range(len(groups)-1,0,-1):
self.define("group=%s" % groups[i-1], groups[i])
def add_button(self, name, callback, data=None, **defs):
'''
Add a clickable button to the tweak bar. The given callback is invoked
when the user clicks the button (with data as its argument when data is
not None).
'''
def wrapped_callback(userdata):
d = ctypes.cast(userdata, ctypes.py_object)
if d.value is not None:
callback(d.value)
else:
callback()
c_callback = BUTTON_FUNC(wrapped_callback)
self._c_callbacks.append(c_callback)
def_str = _dict_to_defs(defs)
data_p = ctypes.py_object(data)
check_error( TwAddButton(self._bar, name, c_callback, data_p, def_str) )
def add_separator(self, name, **defs):
'''Add a visual separator to the tweak bar.'''
def_str = _dict_to_defs(defs)
check_error( TwAddSeparator(self._bar, name, def_str ) )
def define(self, definition='', varname=None):
'''
This function defines optional parameters for tweak bars and
variables. For instance, it allows you to change the color of a tweak
bar, to set a min and a max value for a variable, to add an help
message that inform users of the meaning of a variable, and so on...
If no varname is given, definition is applied to bar, else, it is
applied to the given variable.
'''
if varname:
arg = '%s/%s %s' % (self.name, varname,definition)
else:
arg = '%s %s' % (self.name, definition)
check_error(TwDefine(arg))
def destroy(self):
TwDeleteBar(self._bar)<|fim▁end|> | :type: int
''')
|
<|file_name|>torrent.py<|end_file_name|><|fim▁begin|>""" PyroScope - Controller "torrent".
Copyright (c) 2009 The PyroScope Project <[email protected]><|fim▁hole|> it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import logging
from pylons import request, response, session, tmpl_context as c
from pylons.controllers.util import abort, redirect_to
from pyroscope.lib.base import render, PageController
from pyroscope.engines import rtorrent
log = logging.getLogger(__name__)
class TorrentController(PageController):
def __init__(self):
self.proxy = rtorrent.Proxy()
def index(self):
# Redirect to view page
return redirect_to(action="view") #, id="HelpIndex")
def view(self, id):
c.hash = id
c.name = id
c.torrents = list(rtorrent.View(self.proxy, "incomplete").items())
# Return a rendered template
return render("pages/torrent.mako")<|fim▁end|> |
This program is free software; you can redistribute it and/or modify |
<|file_name|>0005_auto_20170402_1502.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-02 15:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('talks', '0004_auto_20170326_1755'),
]
<|fim▁hole|> model_name='talk',
name='fav_count',
field=models.PositiveIntegerField(default=0, verbose_name='favorite count'),
),
migrations.AlterField(
model_name='talk',
name='view_count',
field=models.PositiveIntegerField(default=0, verbose_name='view count'),
),
migrations.AlterField(
model_name='talk',
name='vote_count',
field=models.PositiveIntegerField(default=0, verbose_name='vote count'),
),
]<|fim▁end|> | operations = [
migrations.AlterField( |
<|file_name|>tfrecordio_test.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import binascii
import glob
import gzip
import io
import logging
import os
import pickle
import random
import re
import sys
import unittest
import zlib
from builtins import range
import crcmod
# patches unittest.TestCase to be python3 compatible
import future.tests.base # pylint: disable=unused-import
import apache_beam as beam
from apache_beam import Create
from apache_beam import coders
from apache_beam.io.filesystem import CompressionTypes
from apache_beam.io.tfrecordio import ReadAllFromTFRecord
from apache_beam.io.tfrecordio import ReadFromTFRecord
from apache_beam.io.tfrecordio import WriteToTFRecord
from apache_beam.io.tfrecordio import _TFRecordSink
from apache_beam.io.tfrecordio import _TFRecordUtil
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_utils import TempDir
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
try:
import tensorflow.compat.v1 as tf # pylint: disable=import-error
except ImportError:
try:
import tensorflow as tf # pylint: disable=import-error
except ImportError:
tf = None # pylint: disable=invalid-name
logging.warning('Tensorflow is not installed, so skipping some tests.')
# Created by running following code in python:
# >>> import tensorflow as tf
# >>> import base64
# >>> writer = tf.python_io.TFRecordWriter('/tmp/python_foo.tfrecord')
# >>> writer.write(b'foo')
# >>> writer.close()
# >>> with open('/tmp/python_foo.tfrecord', 'rb') as f:
# ... data = base64.b64encode(f.read())
# ... print(data)
FOO_RECORD_BASE64 = b'AwAAAAAAAACwmUkOZm9vYYq+/g=='
# Same as above but containing two records [b'foo', b'bar']
FOO_BAR_RECORD_BASE64 = b'AwAAAAAAAACwmUkOZm9vYYq+/gMAAAAAAAAAsJlJDmJhckYA5cg='
def _write_file(path, base64_records):
record = binascii.a2b_base64(base64_records)
with open(path, 'wb') as f:
f.write(record)
def _write_file_deflate(path, base64_records):
record = binascii.a2b_base64(base64_records)
with open(path, 'wb') as f:
f.write(zlib.compress(record))
def _write_file_gzip(path, base64_records):
record = binascii.a2b_base64(base64_records)
with gzip.GzipFile(path, 'wb') as f:
f.write(record)
class TestTFRecordUtil(unittest.TestCase):
def setUp(self):
self.record = binascii.a2b_base64(FOO_RECORD_BASE64)
def _as_file_handle(self, contents):
result = io.BytesIO()
result.write(contents)
result.seek(0)
return result
def _increment_value_at_index(self, value, index):
l = list(value)
if sys.version_info[0] <= 2:
l[index] = chr(ord(l[index]) + 1)
return b"".join(l)
else:
l[index] = l[index] + 1
return bytes(l)
def _test_error(self, record, error_text):
with self.assertRaisesRegex(ValueError, re.escape(error_text)):<|fim▁hole|> self.assertEqual(0xf909b029, _TFRecordUtil._masked_crc32c(b'\xff' * 32))
self.assertEqual(0xfebe8a61, _TFRecordUtil._masked_crc32c(b'foo'))
self.assertEqual(
0xe4999b0,
_TFRecordUtil._masked_crc32c(b'\x03\x00\x00\x00\x00\x00\x00\x00'))
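  # For reference, the TFRecord CRC mask being exercised here is, per the
  # TensorFlow record format spec (stated as documentation only; the
  # implementation under test lives in apache_beam.io.tfrecordio):
  #   masked = (((crc >> 15) | (crc << 17)) + 0xa282ead8) & 0xFFFFFFFF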
def test_masked_crc32c_crcmod(self):
crc32c_fn = crcmod.predefined.mkPredefinedCrcFun('crc-32c')
self.assertEqual(
0xfd7fffa,
_TFRecordUtil._masked_crc32c(
b'\x00' * 32, crc32c_fn=crc32c_fn))
self.assertEqual(
0xf909b029,
_TFRecordUtil._masked_crc32c(
b'\xff' * 32, crc32c_fn=crc32c_fn))
self.assertEqual(
0xfebe8a61, _TFRecordUtil._masked_crc32c(
b'foo', crc32c_fn=crc32c_fn))
self.assertEqual(
0xe4999b0,
_TFRecordUtil._masked_crc32c(
b'\x03\x00\x00\x00\x00\x00\x00\x00', crc32c_fn=crc32c_fn))
def test_write_record(self):
file_handle = io.BytesIO()
_TFRecordUtil.write_record(file_handle, b'foo')
self.assertEqual(self.record, file_handle.getvalue())
def test_read_record(self):
actual = _TFRecordUtil.read_record(self._as_file_handle(self.record))
self.assertEqual(b'foo', actual)
def test_read_record_invalid_record(self):
self._test_error(b'bar', 'Not a valid TFRecord. Fewer than 12 bytes')
def test_read_record_invalid_length_mask(self):
record = self._increment_value_at_index(self.record, 9)
self._test_error(record, 'Mismatch of length mask')
def test_read_record_invalid_data_mask(self):
record = self._increment_value_at_index(self.record, 16)
self._test_error(record, 'Mismatch of data mask')
def test_compatibility_read_write(self):
for record in [b'', b'blah', b'another blah']:
file_handle = io.BytesIO()
_TFRecordUtil.write_record(file_handle, record)
file_handle.seek(0)
actual = _TFRecordUtil.read_record(file_handle)
self.assertEqual(record, actual)
class TestTFRecordSink(unittest.TestCase):
def _write_lines(self, sink, path, lines):
f = sink.open(path)
for l in lines:
sink.write_record(f, l)
sink.close(f)
def test_write_record_single(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
record = binascii.a2b_base64(FOO_RECORD_BASE64)
sink = _TFRecordSink(
path,
coder=coders.BytesCoder(),
file_name_suffix='',
num_shards=0,
shard_name_template=None,
compression_type=CompressionTypes.UNCOMPRESSED)
self._write_lines(sink, path, [b'foo'])
with open(path, 'rb') as f:
self.assertEqual(f.read(), record)
def test_write_record_multiple(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
record = binascii.a2b_base64(FOO_BAR_RECORD_BASE64)
sink = _TFRecordSink(
path,
coder=coders.BytesCoder(),
file_name_suffix='',
num_shards=0,
shard_name_template=None,
compression_type=CompressionTypes.UNCOMPRESSED)
self._write_lines(sink, path, [b'foo', b'bar'])
with open(path, 'rb') as f:
self.assertEqual(f.read(), record)
@unittest.skipIf(tf is None, 'tensorflow not installed.')
class TestWriteToTFRecord(TestTFRecordSink):
def test_write_record_gzip(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
with TestPipeline() as p:
input_data = [b'foo', b'bar']
_ = p | beam.Create(input_data) | WriteToTFRecord(
file_path_prefix, compression_type=CompressionTypes.GZIP)
actual = []
file_name = glob.glob(file_path_prefix + '-*')[0]
for r in tf.python_io.tf_record_iterator(
file_name, options=tf.python_io.TFRecordOptions(
tf.python_io.TFRecordCompressionType.GZIP)):
actual.append(r)
self.assertEqual(sorted(actual), sorted(input_data))
def test_write_record_auto(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
with TestPipeline() as p:
input_data = [b'foo', b'bar']
_ = p | beam.Create(input_data) | WriteToTFRecord(
file_path_prefix, file_name_suffix='.gz')
actual = []
file_name = glob.glob(file_path_prefix + '-*.gz')[0]
for r in tf.python_io.tf_record_iterator(
file_name, options=tf.python_io.TFRecordOptions(
tf.python_io.TFRecordCompressionType.GZIP)):
actual.append(r)
self.assertEqual(sorted(actual), sorted(input_data))
class TestReadFromTFRecord(unittest.TestCase):
def test_process_single(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file(path, FOO_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path,
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO,
validate=True))
assert_that(result, equal_to([b'foo']))
def test_process_multiple(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path,
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO,
validate=True))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_deflate(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file_deflate(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path,
coder=coders.BytesCoder(),
compression_type=CompressionTypes.DEFLATE,
validate=True))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_gzip(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path,
coder=coders.BytesCoder(),
compression_type=CompressionTypes.GZIP,
validate=True))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_auto(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result.gz')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path,
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO,
validate=True))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_gzip_no_coder(self):  # renamed: a second test_process_gzip would shadow the one defined above
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path, compression_type=CompressionTypes.GZIP))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_gzip_auto(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result.gz')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| ReadFromTFRecord(
path, compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo', b'bar']))
class TestReadAllFromTFRecord(unittest.TestCase):
def _write_glob(self, temp_dir, suffix):
for _ in range(3):
path = temp_dir.create_temp_file(suffix)
_write_file(path, FOO_BAR_RECORD_BASE64)
def test_process_single(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file(path, FOO_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| Create([path])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo']))
def test_process_multiple(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| Create([path])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_glob(self):
with TempDir() as temp_dir:
self._write_glob(temp_dir, 'result')
glob = temp_dir.get_path() + os.path.sep + '*result'
with TestPipeline() as p:
result = (p
| Create([glob])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo', b'bar'] * 3))
def test_process_multiple_globs(self):
with TempDir() as temp_dir:
globs = []
for i in range(3):
suffix = 'result' + str(i)
self._write_glob(temp_dir, suffix)
globs.append(temp_dir.get_path() + os.path.sep + '*' + suffix)
with TestPipeline() as p:
result = (p
| Create(globs)
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo', b'bar'] * 9))
def test_process_deflate(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file_deflate(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| Create([path])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.DEFLATE))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_gzip(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| Create([path])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.GZIP))
assert_that(result, equal_to([b'foo', b'bar']))
def test_process_auto(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result.gz')
_write_file_gzip(path, FOO_BAR_RECORD_BASE64)
with TestPipeline() as p:
result = (p
| Create([path])
| ReadAllFromTFRecord(
coder=coders.BytesCoder(),
compression_type=CompressionTypes.AUTO))
assert_that(result, equal_to([b'foo', b'bar']))
class TestEnd2EndWriteAndRead(unittest.TestCase):
def create_inputs(self):
input_array = [[random.random() - 0.5 for _ in range(15)]
for _ in range(12)]
memfile = io.BytesIO()
pickle.dump(input_array, memfile)
return memfile.getvalue()
def test_end2end(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
# Generate a TFRecord file.
with TestPipeline() as p:
expected_data = [self.create_inputs() for _ in range(0, 10)]
_ = p | beam.Create(expected_data) | WriteToTFRecord(file_path_prefix)
# Read the file back and compare.
with TestPipeline() as p:
actual_data = p | ReadFromTFRecord(file_path_prefix + '-*')
assert_that(actual_data, equal_to(expected_data))
def test_end2end_auto_compression(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
# Generate a TFRecord file.
with TestPipeline() as p:
expected_data = [self.create_inputs() for _ in range(0, 10)]
_ = p | beam.Create(expected_data) | WriteToTFRecord(
file_path_prefix, file_name_suffix='.gz')
# Read the file back and compare.
with TestPipeline() as p:
actual_data = p | ReadFromTFRecord(file_path_prefix + '-*')
assert_that(actual_data, equal_to(expected_data))
def test_end2end_auto_compression_unsharded(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
# Generate a TFRecord file.
with TestPipeline() as p:
expected_data = [self.create_inputs() for _ in range(0, 10)]
_ = p | beam.Create(expected_data) | WriteToTFRecord(
file_path_prefix + '.gz', shard_name_template='')
# Read the file back and compare.
with TestPipeline() as p:
actual_data = p | ReadFromTFRecord(file_path_prefix + '.gz')
assert_that(actual_data, equal_to(expected_data))
@unittest.skipIf(tf is None, 'tensorflow not installed.')
def test_end2end_example_proto(self):
with TempDir() as temp_dir:
file_path_prefix = temp_dir.create_temp_file('result')
example = tf.train.Example()
example.features.feature['int'].int64_list.value.extend(list(range(3)))
example.features.feature['bytes'].bytes_list.value.extend(
[b'foo', b'bar'])
with TestPipeline() as p:
_ = p | beam.Create([example]) | WriteToTFRecord(
file_path_prefix, coder=beam.coders.ProtoCoder(example.__class__))
# Read the file back and compare.
with TestPipeline() as p:
actual_data = (p | ReadFromTFRecord(
file_path_prefix + '-*',
coder=beam.coders.ProtoCoder(example.__class__)))
assert_that(actual_data, equal_to([example]))
def test_end2end_read_write_read(self):
with TempDir() as temp_dir:
path = temp_dir.create_temp_file('result')
with TestPipeline() as p:
# Initial read to validate the pipeline doesn't fail before the file is
# created.
_ = p | ReadFromTFRecord(path + '-*', validate=False)
expected_data = [self.create_inputs() for _ in range(0, 10)]
_ = p | beam.Create(expected_data) | WriteToTFRecord(
path, file_name_suffix='.gz')
# Read the file back and compare.
with TestPipeline() as p:
actual_data = p | ReadFromTFRecord(path+'-*', validate=True)
assert_that(actual_data, equal_to(expected_data))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()<|fim▁end|> | _TFRecordUtil.read_record(self._as_file_handle(record))
def test_masked_crc32c(self):
self.assertEqual(0xfd7fffa, _TFRecordUtil._masked_crc32c(b'\x00' * 32)) |
<|file_name|>subst.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use util::ppaux::Repr;
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`.
pub trait Subst {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
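// A minimal usage sketch (`tcx` and `substs` are assumed to be in scope;
// they are not defined in this module):
//
//     let t_substituted = some_ty.subst(tcx, &substs);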
///////////////////////////////////////////////////////////////////////////
// Substitution over types
//
// Because this is so common, we make a special optimization to avoid
// doing anything if `substs` is a no-op. I tried to generalize these
// to all subst methods but ran into trouble due to the limitations of
// our current method/trait matching algorithm. - Niko
trait EffectfulSubst {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Self;
}
impl Subst for ty::t {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if ty::substs_is_noop(substs) {
return *self;
} else {
return self.effectfulSubst(tcx, substs);
}
}
}
impl EffectfulSubst for ty::t {
fn effectfulSubst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::t {
if !ty::type_needs_subst(*self) {
return *self;
}
match ty::get(*self).sty {
ty::ty_param(p) => {
substs.tps[p.idx]
}
ty::ty_self(_) => {
substs.self_ty.expect("ty_self not found in substs")
}
_ => {
ty::fold_regions_and_ty(
tcx, *self,
|r| r.subst(tcx, substs),
|t| t.effectfulSubst(tcx, substs),
|t| t.effectfulSubst(tcx, substs))
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// Other types
impl<T:Subst> Subst for ~[T] {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ~[T] {
self.map(|t| t.subst(tcx, substs))
}
}
impl<T:Subst> Subst for @T {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> @T {
match self {
&@ref t => @t.subst(tcx, substs)
}
}
}
impl<T:Subst> Subst for Option<T> {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> Option<T> {
self.map(|t| t.subst(tcx, substs))
}<|fim▁hole|>}
impl Subst for ty::TraitRef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TraitRef {
ty::TraitRef {
def_id: self.def_id,
substs: self.substs.subst(tcx, substs)
}
}
}
impl Subst for ty::substs {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::substs {
ty::substs {
self_r: self.self_r.subst(tcx, substs),
self_ty: self.self_ty.map(|typ| typ.subst(tcx, substs)),
tps: self.tps.map(|typ| typ.subst(tcx, substs))
}
}
}
impl Subst for ty::BareFnTy {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::BareFnTy {
ty::fold_bare_fn_ty(self, |t| t.subst(tcx, substs))
}
}
impl Subst for ty::ParamBounds {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ParamBounds {
ty::ParamBounds {
builtin_bounds: self.builtin_bounds,
trait_bounds: self.trait_bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::TypeParameterDef {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::TypeParameterDef {
ty::TypeParameterDef {
def_id: self.def_id,
bounds: self.bounds.subst(tcx, substs)
}
}
}
impl Subst for ty::Generics {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Generics {
ty::Generics {
type_param_defs: self.type_param_defs.subst(tcx, substs),
region_param: self.region_param
}
}
}
impl Subst for ty::Region {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::Region {
// Note: This routine only handles the self region, because it
// is only concerned with substitutions of regions that appear
// in types. Region substitution of the bound regions that
// appear in a function signature is done using the
// specialized routine
// `middle::typeck::check::regionmanip::replace_bound_regions_in_fn_sig()`.
// As we transition to the new region syntax this distinction
// will most likely disappear.
match self {
&ty::re_bound(ty::br_self) => {
match substs.self_r {
None => {
tcx.sess.bug(
fmt!("ty::Region#subst(): \
Reference to self region when \
given substs with no self region: %s",
substs.repr(tcx)));
}
Some(self_r) => self_r
}
}
_ => *self
}
}
}
impl Subst for ty::ty_param_bounds_and_ty {
fn subst(&self, tcx: ty::ctxt, substs: &ty::substs) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: self.generics.subst(tcx, substs),
ty: self.ty.subst(tcx, substs)
}
}
}<|fim▁end|> | |
<|file_name|>slice.rs<|end_file_name|><|fim▁begin|>use super::{ToRuby, ToRubyResult};
use sys::{rb_ary_new_capa, rb_ary_push};
impl<'a, T> ToRuby for &'a [T] where &'a T: ToRuby {
fn to_ruby(self) -> ToRubyResult {
let ary = unsafe { rb_ary_new_capa(self.len() as isize) };
for item in self {
unsafe { rb_ary_push(ary, item.to_ruby()?); }<|fim▁hole|> }
}<|fim▁end|> | }
Ok(ary) |
<|file_name|>duckduckgo2html.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""Retrieve results from the DuckDuckGo zero-click API in simple HTML format."""
import json as jsonlib
import logging
import re
import urllib.request, urllib.error, urllib.parse
__version__ = (1, 0, 0)
def results2html(results, results_priority=None, max_number_of_results=None,
ignore_incomplete=True, always_show_related=False,
header_start_level=1, hide_headers=False, hide_signature=False):
if not results:
return ''
if not results_priority:
results_priority = ['answer', 'abstract', 'definition', 'results',
'infobox', 'redirect', 'related']
if not always_show_related:
other = [x for x in results_priority if x != 'related']
if any(results.get(x).is_complete() for x in other):
results_priority = other
html_header = '<h{level:d}>{title}</h{level:d}>'
html_paragraph = '<p>{contents}</p>'
html_contents = []
children = [results.get(x) for x in results_priority]
results_count = 0
for level, child in _iterchildren(header_start_level, children):
html = child.as_html()
valid = html and (not ignore_incomplete or child.is_complete())
if not hide_headers and child.name and (valid or child.children()):
header = html_header.format(title=child.name, level=level)
html_contents.append(header)
if valid:
html_contents.append(html_paragraph.format(contents=html))
results_count += 1
if max_number_of_results and results_count >= max_number_of_results:
break
html_contents[:] = [x for x in html_contents if x]
if not html_contents:
return ''
if not hide_signature:
html_contents.append('<footer><small>Results from DuckDuckGo</small></footer>')
return ''.join(html_contents).strip()
def search(query, useragent='duckduckgo2html', **kwargs):
params = {
'q': query,
'format': 'json',
'pretty': '1',
'no_redirect': '1',
'no_html': '1',
'skip_disambig': '0',
}
params.update(kwargs)
enc_params = urllib.parse.urlencode(params)
url = 'http://api.duckduckgo.com/?' + enc_params
try:
request = urllib.request.Request(url, headers={'User-Agent': useragent})
response = urllib.request.urlopen(request)
json = jsonlib.loads(response.read().decode('utf-8'))
response.close()
return Results(json)
except urllib.error.HTTPError as err:
logging.error('Query failed with HTTPError code %s', err.code)
except urllib.error.URLError as err:
logging.error('Query failed with URLError %s', err.reason)
except Exception:
logging.error('Unhandled exception')
raise
return None
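# A minimal usage sketch (hypothetical, not part of the module's API;
# assumes network access to api.duckduckgo.com):
#
#   results = search('example query', useragent='duckduckgo2html-example')
#   html = results2html(results, max_number_of_results=3) if results else ''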
def _iterchildren(start_level, children):
for item in children:
grandchildren = item.children()<|fim▁hole|> yield start_level, item
if grandchildren:
for subitem in _iterchildren(start_level+1, grandchildren):
yield subitem
def _html_url(url, display=None):
if not display:
display = url
return '<a href="{0}">{1}</a>'.format(url, display)
class Results(object):
def __init__(self, json):
self.json = jsonlib.dumps(json, indent=2)
self.type = json.get('Type')
self.answer = Answer(json)
self.results = _ResultList('Results', json.get('Results', []))
self.related = _ResultList('Related Topics', json.get('RelatedTopics', []))
self.abstract = Abstract(json)
self.definition = Definition(json)
self.redirect = Redirect(json)
self.infobox = Infobox(json)
def get(self, name):
if hasattr(self, name) and getattr(self, name):
return getattr(self, name)
return _ResultItemBase()
class _ResultItemBase(object):
"""Base class for results"""
def __init__(self, name=None):
self.name = name
def is_complete(self):
return False
def children(self):
return []
def as_html(self):
return ''
class _ResultList(_ResultItemBase):
"""A list of results"""
def __init__(self, name, items):
super().__init__(name)
self.items = [Result(x) for x in items]
def children(self):
return self.items
class Result(_ResultItemBase):
def __init__(self, json):
super().__init__(json.get('Name', '') if json else '')
self.topics = [Result(elem) for elem in json.get('Topics', [])]
self.html = json.get('Result', '') if json else ''
self.text = json.get('Text', '') if json else ''
self.url = json.get('FirstURL', '') if json else ''
def is_complete(self):
return True if self.text else False
def children(self):
return self.topics
def as_html(self):
if self.html:
return Result._rex_sub.sub('a> ', self.html)
elif self.text:
return self.text
_rex_sub = re.compile(r'a>(?! )')
class Abstract(_ResultItemBase):
def __init__(self, json):
super().__init__('Abstract')
self.html = json['Abstract']
self.text = json['AbstractText']
self.url = json['AbstractURL']
self.source = json['AbstractSource']
self.heading = json['Heading']
def is_complete(self):
return True if self.html or self.text else False
def as_html(self):
html_list = []
if self.heading:
html_list.append('<b>{0}</b>'.format(self.heading))
if self.html:
html_list.append(self.html)
elif self.text:
html_list.append(self.text)
if self.url:
html_list.append(_html_url(self.url, self.source))
return ' - '.join(html_list)
class Answer(_ResultItemBase):
def __init__(self, json):
super().__init__('Answer')
self.text = json['Answer']
self.type = json['AnswerType']
self.url = None
def is_complete(self):
return True if self.text else False
def as_html(self):
return self.text.replace('\n', '<br>').replace('\r', '')
class Definition(_ResultItemBase):
def __init__(self, json):
super().__init__('Definition')
self.text = json['Definition']
self.url = json['DefinitionURL']
self.source = json['DefinitionSource']
def is_complete(self):
return True if self.text else False
def as_html(self):
if self.text and self.url:
return self.text + ' - ' + _html_url(self.url, self.source)
elif self.text:
return self.text
elif self.url:
return _html_url(self.url, self.source)
class Redirect(_ResultItemBase):
def __init__(self, json):
super().__init__('Redirect')
self.url = json['Redirect']
def is_complete(self):
return True if self.url else False
def as_html(self):
return _html_url(self.url) if self.url else None
class Infobox(_ResultItemBase):
class Content(object):
def __init__(self, json):
self.data_type = json.get('data_type', '') if json else ''
self.label = json.get('label', '') if json else ''
self.value = json.get('value', '') if json else ''
def as_html(self):
if self.data_type == 'string' and self.label and self.value:
return '<b>{0}</b> {1}'.format(self.label, self.value)
def __init__(self, json):
super().__init__('Infobox')
infobox = json.get('Infobox') if json.get('Infobox') else {}
self.meta = infobox.get('meta', [])
self.content = [Infobox.Content(x) for x in infobox.get('content', [])]
def is_complete(self):
return True if self.content else False
def as_html(self):
contents = [x.as_html() for x in self.content]
return '<br>'.join(x for x in contents if x)
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'query',
nargs='*',
help='the search query')
parser.add_argument(
'-v', '--version',
action='version',
version='%(prog)s v{0}.{1}.{2}'.format(*__version__))
args = parser.parse_args()
logging.basicConfig(format='%(levelname)s: %(filename)s: %(message)s')
if args.query:
queries = [' '.join(args.query)]
elif not sys.stdin.isatty():
queries = sys.stdin.read().splitlines()
else:
parser.print_help()
sys.exit(1)
for query in queries:
html = results2html(search(query))
if html:
print(html)
else:
logging.warning('No results found')<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() {
let mut my_struct = MyStruct {
my_field1: 0,
my_field2: 0,
my_field3: 0,
my_field4: 0,
my_field5: 0,
};
my_struct.my_field1 = 7;
assert_eq!(7, my_struct.my_field1);
assert_eq!(0, my_struct.my_field2);
assert_eq!(0, my_struct.my_field3);
assert_eq!(0, my_struct.my_field4);
assert_eq!(0, my_struct.my_field5);
let my_struct = my_struct; // set my_struct to be immutable
let my_struct2 = MyStruct { my_field3: 5, ..my_struct }; // copy unspecified fields<|fim▁hole|>
assert_eq!(7, my_struct2.my_field1);
assert_eq!(0, my_struct2.my_field2);
assert_eq!(5, my_struct2.my_field3);
assert_eq!(0, my_struct2.my_field4);
assert_eq!(0, my_struct2.my_field5);
let my_tuple_struct = MyTupleStruct(7, 42, 3.14);
assert_eq!(7, my_tuple_struct.0);
assert_eq!(42, my_tuple_struct.1);
assert_eq!(3.14, my_tuple_struct.2);
}
struct MyStruct {
my_field1: i32,
my_field2: i32,
my_field3: i32,
my_field4: i32,
my_field5: i32,
}
struct MyTupleStruct(i32, i32, f32);<|fim▁end|> | |
<|file_name|>feedback_vote.js<|end_file_name|><|fim▁begin|>var smidig = smidig || {};
smidig.voter = (function($) {
var talk_id, $form, $notice, $loader;
function supports_html5_storage() {
try {
return 'localStorage' in window && window['localStorage'] !== null;
} catch (e) {
return false;
}
}
function save_vote($form, talk_id) {
var votes = JSON.parse(localStorage.getItem("votes")) || {};
votes[talk_id] = true;
localStorage.setItem("votes", JSON.stringify(votes));
finished($form);
}
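// After a vote on e.g. talk 42, localStorage holds
// votes = '{"42":true}' (a JSON-encoded map keyed by talk id).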
function bind($form, talk_id) {
$form.find("input[name='commit']").click(function() {
var data = $form.serializeObject();
if(!data["feedback_vote[vote]"]) {
alert("Du må gi minst 1 stjerne!");
return;
}
$form.find(".inputs").hide();
$form.find(".ajaxloader").show();
$form.find(".ajaxloader").css("visibility", "visible")
$.ajax({
type: "POST",
url: $form.attr("action"),
dataType: 'json',
data: data,
complete: function(xhr) {
// readyState is always 4 inside a complete callback, so the original
// nesting could never reach its error branch; treat any non-201
// response as a save failure instead.
if (xhr.readyState == 4 && xhr.status == 201) {
smidig.voter.save_vote($form, talk_id);
} else {
alert("Det skjedde noe galt ved lagring. Prøv igjen");
$form.find(".ajaxloader").hide();
$form.find(".inputs").show();
}
}
});
//cancel submit event..
return false;
});
}
function init($form, talk_id) {
if(!supports_html5_storage()) {
$notice.text("Din enhet støttes ikke");
finished($form);
return; // without this, the code below would touch the unsupported localStorage
}
var votes = JSON.parse(localStorage.getItem("votes")) || {};
if(votes[talk_id]) {
finished($form);
} else {
bind($form, talk_id);
}
}
function finished($form) {
$form.empty();
$form.append("<em>(Du har stemt)</em>");
$form.show();
}
return {
init: init,
save_vote: save_vote
};
})(jQuery);
//Document on load!
$(function() {
$(".talk").each(function() {
var talk_id = $(this).data("talkid");
if(talk_id) {
var voteTmpl = $("#tmplVote").tmpl({talk_id: talk_id});
voteTmpl.find("input.star").rating();
$(this).find(".description").append(voteTmpl);
smidig.voter.init(voteTmpl, talk_id);
}
});
});
//Extensions to serialize a form to a js-object.
$.fn.serializeObject = function(){
var o = {};
var a = this.serializeArray();<|fim▁hole|> o[this.name] = [o[this.name]];
}
o[this.name].push(this.value || '');
} else {
o[this.name] = this.value || '';
}
});
return o;
};<|fim▁end|> | $.each(a, function() {
if (o[this.name] !== undefined) {
if (!o[this.name].push) { |
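// Usage sketch for serializeObject above (hypothetical markup): a form with
// <input name="tag" value="a"> and <input name="tag" value="b"> serializes
// to {tag: ['a', 'b']}; single-valued fields stay plain strings.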
<|file_name|>Movies.js<|end_file_name|><|fim▁begin|>enyo.kind({
name: "Remote.Movies",<|fim▁hole|> onPlay: "",
},
components: [
{kind: "PageHeader", components: [
{name: "headerText", kind: enyo.VFlexBox,
content: "", flex: 1
},
{name: "backButton", kind: "Button", content: "Back",
onclick: "goBack"
}
]},
{name: "pane", kind: "Pane", flex: 1, components: [
{name: "movies", className: "enyo-bg", kind: "Remote.MovieList",
onSelect: "selectMovie"
},
]},
],
update: function() {
this.$.pane.view.update();
},
selectMovie: function(inSender, inMovie) {
this.doPlay(inMovie.id);
},
});<|fim▁end|> | kind: "VFlexBox",
events: { |
<|file_name|>show_reply_thumbnails.js<|end_file_name|><|fim▁begin|>//================================================
// show_reply_thumbnails.js
// Author: @iihoshi
//================================================
(function ($, jn) {
var my_filename = 'show_reply_thumbnails.js';
// Plugin info: begin
// Initialize the plugin info registry
if (!jn.pluginInfo)
jn.pluginInfo = {};
// The plugin info entry itself
jn.pluginInfo[my_filename.split('.')[0]] = {
'name' : {<|fim▁hole|> 'author' : {
'en' : '@iihoshi'
},
'version' : '1.0.1',
'file' : my_filename,
'language' : ['en', 'ja'],
'last_update' : "2015/9/24",
'update_timezone' : '9',
'jnVersion' : '4.3.1.0',
'description' : {
'ja' : 'リプライにサムネイル画像があれば表示します。',
'en' : 'Shows thumbnails if a tweet in a reply chain has them.'
},
'updateinfo' : 'http://www.colorless-sight.jp/archives/JanetterPluginUpdateInfo.txt'
};
// Plugin info: end
if ((_Janetter_Window_Type != "main") && (_Janetter_Window_Type != "profile")) {
return;
}
function initForThumbnail() {
var orig_janetterThumbnail = $.fn.janetterThumbnail.toString();
var re_siblings = /_content\.siblings\('div\.tweet-thumb'\);$/m;
var re_find = /_content\.find\('div\.tweet-body /gm;
if (!re_siblings.test(orig_janetterThumbnail) ||
!re_find.test(orig_janetterThumbnail)) {
return false;
}
var replaced = orig_janetterThumbnail
.replace(re_siblings, "_content.children('div.tweet-thumb');")
.replace(re_find, "_content.find('div.tweet-reply-body ");
// console.log(replaced);
eval('$.fn.janetterThumbnailForReply = ' + replaced);
var tnMouseOverHandler = function () {
var $this = $(this),
tweet_reply = $this.parents('div.tweet-reply:first');
$this.unbind('mouseover', tnMouseOverHandler);
if (tweet_reply.length > 0) {
tweet_reply.janetterThumbnailForReply($this.attr('href'));
}
};
$.fn.janetterThumbnailForReplyEventSet = function () {
this.bind('mouseover', tnMouseOverHandler);
return this;
};
return true;
}
function initForExpandUrl() {
var orig_jn_expandUrl = jn.expandUrl.toString();
var re_tweet_content = /div\.tweet-content:first/m;
var re_janetterThumbnail = /\.janetterThumbnail(\W)/gm;
if (!re_tweet_content.test(orig_jn_expandUrl) ||
!re_janetterThumbnail.test(orig_jn_expandUrl)) {
return false;
}
var replaced = orig_jn_expandUrl
.replace(re_tweet_content, 'div.tweet-reply:first')
.replace(re_janetterThumbnail, '.janetterThumbnailForReply$1');
// console.log(replaced);
eval('var expandUrl = ' + replaced);
var euMouseOverHandler = function () {
var $this = $(this);
$this.unbind('mouseover', euMouseOverHandler);
expandUrl($this);
}
$.fn.janetterExpandUrlForReplyEventSet = function () {
this.bind('mouseover', euMouseOverHandler);
return this;
}
return true;
}
// Initialization for this plugin (run at onInitializeDone)
function initOnInitialized() {
if (!initForThumbnail() || !initForExpandUrl()) {
new jn.msgdialog({
title: my_filename,
icon: '',
message: 'Sorry, ' + my_filename+ ' cannot be installed.',
buttons: [ janet.msg.ok ],
});
return;
}
var orig_generateReply = jn.generateReply;
jn.generateReply = function (item, is_default) {
var reply = orig_generateReply(item, is_default);
reply.append('<div class="tweet-thumb"/>');
var a = reply.find('.tweet-reply-body > p.text').children('a.link');
a.janetterExpandUrlForReplyEventSet();
if (jn.conf.disp_thumbnail == 'over')
a.janetterThumbnailForReplyEventSet();
else
reply.janetterThumbnailForReply(a);
return reply;
};
console.log(my_filename + ' has been initialized.');
}
if (jn.temp.initialized) {
// The original onInitializeDone() has already been called!
initOnInitialized();
} else {
var orig_onInitializeDone = jn.onInitializeDone;
jn.onInitializeDone = function () {
orig_onInitializeDone && orig_onInitializeDone.apply(this, arguments);
initOnInitialized();
};
}
})(jQuery, janet);<|fim▁end|> | 'ja' : 'リプライサムネイル表示',
'en' : 'Show thumbnails in reply chain'
}, |
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>// #docplaster
// #docregion
// Import the native Angular services.
import { Component } from '@angular/core';
import { Title } from '@angular/platform-browser';
@Component({<|fim▁hole|> `<p>
Select a title to set on the current HTML document:
</p>
<ul>
<li><a (click)="setTitle( 'Good morning!' )">Good morning</a>.</li>
<li><a (click)="setTitle( 'Good afternoon!' )">Good afternoon</a>.</li>
<li><a (click)="setTitle( 'Good evening!' )">Good evening</a>.</li>
</ul>
`
})
// #docregion class
export class AppComponent {
public constructor(private titleService: Title ) { }
public setTitle( newTitle: string) {
this.titleService.setTitle( newTitle );
}
}
// #enddocregion class<|fim▁end|> | selector: 'app-root',
template: |
<|file_name|>331_srtp_prefer_rtp_avp.py<|end_file_name|><|fim▁begin|><|fim▁hole|># $Id: 331_srtp_prefer_rtp_avp.py 2081 2008-06-27 21:59:15Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
# When SRTP is NOT enabled in pjsua, it should prefer to use
# RTP/AVP media line if there are multiple m=audio lines
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 5000 RTP/SAVP 0
a=crypto:1 aes_cm_128_hmac_sha1_80 inline:WnD7c1ksDGs+dIefCEo8omPg4uO8DYIinNGL5yxQ
m=audio 4000 RTP/AVP 0
"""
pjsua_args = "--null-audio --auto-answer 200 --use-srtp 0"
extra_headers = ""
include = ["Content-Type: application/sdp", # response must include SDP
"m=audio 0 RTP/SAVP[\\s\\S]+m=audio [1-9]+[0-9]* RTP/AVP"
]
exclude = ["a=crypto"]
sendto_cfg = sip.SendtoCfg("Prefer RTP/SAVP", pjsua_args, sdp, 200,
extra_headers=extra_headers,
resp_inc=include, resp_exc=exclude)<|fim▁end|> | |
<|file_name|>ColumnChooserTable.component.js<|end_file_name|><|fim▁begin|>import React from 'react';
import PropTypes from 'prop-types';
import ColumnChooserRow from '../ColumnChooserRow';
import { columnsPropTypes } from '../../columnChooser.propTypes';
const ColumnChooserTable = ({ columns = [], id, onChangeCheckbox, t }) =>
columns.map(column => (
<ColumnChooserRow key={column.key}>
<ColumnChooserRow.Checkbox
checked={column.visible}
id={id}
dataFeature="column-chooser.select"
description={t('CHECKBOX_DISPLAY_COLUMN_DESCRIPTION', {
defaultValue: 'display the column {{label}}',
label: column.label,
})}
label={column.label}
locked={column.locked}
onChange={onChangeCheckbox}
t={t}
/>
</ColumnChooserRow>
));
ColumnChooserTable.propTypes = {<|fim▁hole|> columns: columnsPropTypes,
id: PropTypes.string.isRequired,
onChangeCheckbox: PropTypes.func.isRequired,
t: PropTypes.func.isRequired,
};
export default ColumnChooserTable;<|fim▁end|> | |
<|file_name|>category.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import with_statement
from django import forms
from django.views.generic import DetailView
from shoop.core.models import Category, Manufacturer, Product
from shoop.front.utils.product_sorting import (
PRODUCT_SORT_CHOICES, sort_products
)
from shoop.front.utils.views import cache_product_things
class ProductListForm(forms.Form):
sort = forms.CharField(required=False, widget=forms.Select(choices=PRODUCT_SORT_CHOICES))
manufacturers = forms.ModelMultipleChoiceField(queryset=Manufacturer.objects.all(), required=False)
class CategoryView(DetailView):
template_name = "shoop/front/product/category.jinja"
model = Category
template_object_name = "category"
def get_queryset(self):
return self.model.objects.all_visible(
customer=self.request.customer,
shop=self.request.shop,
)
def get_context_data(self, **kwargs):
context = super(CategoryView, self).get_context_data(**kwargs)
category = self.object
context["form"] = form = ProductListForm(data=self.request.GET)
form.full_clean()
filters = {
"shop_products__shop": self.request.shop,
"shop_products__categories": category,
"variation_parent": None
}
manufacturers = form.cleaned_data.get("manufacturers")<|fim▁hole|> products = Product.objects.list_visible(
customer=self.request.customer,
shop=self.request.shop
).filter(**filters).distinct()
products = cache_product_things(self.request, products)
products = sort_products(self.request, products, self.request.GET.get("sort"))
context["products"] = products
return context<|fim▁end|> | if manufacturers:
filters["manufacturer__in"] = manufacturers
|
<|file_name|>default.js<|end_file_name|><|fim▁begin|>Meteor.startup(function () {
});
Deps.autorun(function(){<|fim▁hole|>});<|fim▁end|> | Meteor.subscribe('userData'); |
<|file_name|>general.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time
#import struct
from pycket import impersonators as imp
from pycket import values, values_string
from pycket.cont import continuation, loop_label, call_cont
from pycket.arity import Arity
from pycket import values_parameter
from pycket import values_struct
from pycket import values_regex
from pycket import vector as values_vector
from pycket.error import SchemeException, UserException
from pycket.foreign import W_CPointer, W_CType
from pycket.hash.equal import W_EqualHashTable
from pycket.hash.base import W_HashTable
from pycket.hash.simple import (W_EqImmutableHashTable, W_EqvImmutableHashTable, W_EqMutableHashTable, W_EqvMutableHashTable, make_simple_immutable_table)
from pycket.prims.expose import (unsafe, default, expose, expose_val, prim_env,
procedure, define_nyi, subclass_unsafe, make_procedure)
from pycket.prims.primitive_tables import *
from pycket.prims import string
from pycket.racket_paths import racket_sys_paths
from pycket.env import w_global_config
from rpython.rlib import jit, objectmodel, unroll, rgc
from rpython.rlib.rsre import rsre_re as re
# import for side effects
from pycket.prims import control
from pycket.prims import continuation_marks
from pycket.prims import char
from pycket.prims import box
from pycket.prims import equal as eq_prims
from pycket.prims import foreign
from pycket.prims import hash
from pycket.prims import impersonator
from pycket.prims import input_output
from pycket.prims import logging
from pycket.prims import numeric
from pycket.prims import parameter
from pycket.prims import random
from pycket.prims import regexp
from pycket.prims import string
from pycket.prims import struct_structinfo
from pycket.prims import undefined
from pycket.prims import vector
from rpython.rlib import jit
def make_pred(name, cls):
@expose(name, [values.W_Object], simple=True)
def predicate_(a):
return values.W_Bool.make(isinstance(a, cls))
predicate_.__name__ += cls.__name__
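# For instance, make_pred("pair?", values.W_Cons) registers a primitive
# equivalent to (sketch):
#
#   def predicate_(a):
#       return values.W_Bool.make(isinstance(a, values.W_Cons))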
def make_dummy_char_pred(name):
@expose(name, [values.W_Character], simple=True)
def predicate_(a):
return values.w_false
predicate_.__name__ += name
def make_pred_eq(name, val):
typ = type(val)
@expose(name, [values.W_Object], simple=True)
def pred_eq(a):
return values.W_Bool.make(a is val)
for args in [
("output-port?", values.W_OutputPort),
("pair?", values.W_Cons),
("mpair?", values.W_MCons),
("number?", values.W_Number),
("complex?", values.W_Number),
("fixnum?", values.W_Fixnum),
("flonum?", values.W_Flonum),
("vector?", values.W_MVector),
("string?", values_string.W_String),
("symbol?", values.W_Symbol),
("boolean?", values.W_Bool),
("inspector?", values_struct.W_StructInspector),
("struct-type?", values_struct.W_StructType),
("struct-constructor-procedure?", values_struct.W_StructConstructor),
("struct-predicate-procedure?", values_struct.W_StructPredicate),
("struct-type-property?", values_struct.W_StructProperty),
("struct-type-property-accessor-procedure?",
values_struct.W_StructPropertyAccessor),
("box?", values.W_Box),
("variable-reference?", values.W_VariableReference),
("thread-cell?", values.W_ThreadCell),
("thread-cell-values?", values.W_ThreadCellValues),
("semaphore?", values.W_Semaphore),
("semaphore-peek-evt?", values.W_SemaphorePeekEvt),
("path?", values.W_Path),
("bytes?", values.W_Bytes),
("pseudo-random-generator?", values.W_PseudoRandomGenerator),
("char?", values.W_Character),
("continuation?", values.W_Continuation),
("continuation-mark-set?", values.W_ContinuationMarkSet),
("continuation-mark-key?", values.W_ContinuationMarkKey),
("primitive?", values.W_Prim),
("keyword?", values.W_Keyword),
("weak-box?", values.W_WeakBox),
("ephemeron?", values.W_Ephemeron),
("placeholder?", values.W_Placeholder),
("hash-placeholder?", values.W_HashTablePlaceholder),
("module-path-index?", values.W_ModulePathIndex),
("resolved-module-path?", values.W_ResolvedModulePath),
("impersonator-property-accessor-procedure?",
imp.W_ImpPropertyAccessor),
("impersonator-property?", imp.W_ImpPropertyDescriptor),
("parameter?", values_parameter.W_BaseParameter),
("parameterization?", values_parameter.W_Parameterization),
("hash?", W_HashTable),
("cpointer?", W_CPointer),
("ctype?", W_CType),
("continuation-prompt-tag?", values.W_ContinuationPromptTag),
("logger?", values.W_Logger),
("log-receiver?", values.W_LogReciever),
("evt?", values.W_Evt),
("unquoted-printing-string?", values.W_UnquotedPrintingString),
("port?", values.W_Port),
("security-guard?", values.W_SecurityGuard),
# FIXME
("will-executor?", values.W_WillExecutor),
("bytes-converter?", values.W_Impossible),
("fsemaphore?", values.W_Impossible),
("thread-group?", values.W_Impossible),
("udp?", values.W_Impossible),
("extflonum?", values.W_ExtFlonum),
("custodian-box?", values.W_Impossible),
("custodian?", values.W_Impossible),
("future?", values.W_Impossible),
]:
make_pred(*args)
for args in [
("void?", values.w_void),
("false?", values.w_false),
("null?", values.w_null),
]:
make_pred_eq(*args)
@expose("hash-weak?", [values.W_Object], simple=True)
def hash_weak_huh(obj):
# FIXME
return values.w_false
@expose("hash-strong?", [values.W_Object], simple=True)
def hash_strong_huh(obj):
# FIXME: /pypy/rpython/rlib/rweakref.py
return values.W_Bool.make(isinstance(obj, W_HashTable))
@expose("hash-ephemeron?", [values.W_Object], simple=True)
def hash_ephemeron_huh(obj):
# FIXME
return values.w_false
@expose("hash-equal?", [values.W_Object], simple=True)
def hash_equal_huh(obj):
inner = obj
if isinstance(obj, imp.W_ImpHashTable) or isinstance(obj, imp.W_ChpHashTable):
inner = obj.get_proxied()
return values.W_Bool.make(isinstance(inner, W_EqualHashTable))
@expose("hash-eq?", [values.W_Object], simple=True)
def hash_eq(obj):
inner = obj
if isinstance(obj, imp.W_ImpHashTable) or isinstance(obj, imp.W_ChpHashTable):
inner = obj.get_proxied()
eq_mutable = isinstance(inner, W_EqMutableHashTable)
eq_immutable = isinstance(inner, W_EqImmutableHashTable)
return values.W_Bool.make(eq_mutable or eq_immutable)
@expose("hash-eqv?", [values.W_Object], simple=True)
def hash_eqv(obj):
inner = obj
if isinstance(obj, imp.W_ImpHashTable) or isinstance(obj, imp.W_ChpHashTable):
inner = obj.get_proxied()
eqv_mutable = isinstance(inner, W_EqvMutableHashTable)
eqv_immutable = isinstance(inner, W_EqvImmutableHashTable)
return values.W_Bool.make(eqv_mutable or eqv_immutable)
def struct_port_huh(w_struct):
w_in, w_out = struct_port_prop_huh(w_struct)
return (w_in is not None) or (w_out is not None)
def struct_port_prop_huh(w_struct):
w_type = w_struct.struct_type()
in_property = out_property = None
for property in w_type.properties:
w_property, w_value = property
if w_property is values_struct.w_prop_input_port:
in_property = w_value
elif w_property is values_struct.w_prop_output_port:
out_property = w_value
return in_property, out_property
def struct_input_port_huh(w_struct):
w_in, w_out = struct_port_prop_huh(w_struct)
return w_in is not None
def struct_output_port_huh(w_struct):
w_in, w_out = struct_port_prop_huh(w_struct)
return w_out is not None
@expose("input-port?", [values.W_Object], simple=True)
def input_port_huh(a):
if isinstance(a, values.W_InputPort):
return values.w_true
elif isinstance(a, values_struct.W_Struct):
if struct_input_port_huh(a):
return values.w_true
return values.w_false
@expose("datum-intern-literal", [values.W_Object])
def datum_intern_literal(v):
return v
@expose("byte?", [values.W_Object])
def byte_huh(val):
if isinstance(val, values.W_Fixnum):
return values.W_Bool.make(0 <= val.value <= 255)
return values.w_false
@expose("regexp?", [values.W_Object])
def regexp_huh(r):
if isinstance(r, values_regex.W_Regexp) or isinstance(r, values_regex.W_PRegexp):
return values.w_true
return values.w_false
@expose("pregexp?", [values.W_Object])
def pregexp_huh(r):
if isinstance(r, values_regex.W_PRegexp):
return values.w_true
return values.w_false
@expose("byte-regexp?", [values.W_Object])
def byte_regexp_huh(r):
if isinstance(r, values_regex.W_ByteRegexp) or isinstance(r, values_regex.W_BytePRegexp):
return values.w_true
return values.w_false
@expose("byte-pregexp?", [values.W_Object])
def byte_pregexp_huh(r):
if isinstance(r, values_regex.W_BytePRegexp):
return values.w_true
return values.w_false
@expose("true-object?", [values.W_Object])
def true_object_huh(val):
if val is values.w_true:
return values.w_true
return values.w_false
@expose("procedure?", [values.W_Object])
def procedurep(n):
return values.W_Bool.make(n.iscallable())
@expose("syntax-original?", [values.W_Object], only_old=True)
def syntax_original(v):
return values.w_false
@expose("syntax-tainted?", [values.W_Object], only_old=True)
def syntax_tainted(v):
return values.w_false
@expose("syntax-source-module", [values.W_Object, default(values.W_Object, values.w_false)], only_old=True)
def syntax_source_module(stx, src):
# XXX Obviously not correct
return values.W_ResolvedModulePath(values.W_Symbol.make("fake symbol"))
@expose("srcloc->string", [values.W_Object])
def srcloc_to_string(obj):
return values.w_false
expose_val("null", values.w_null)
expose_val("true", values.w_true)
expose_val("false", values.w_false)
expose_val("break-enabled-key", values.break_enabled_key)
expose_val("exception-handler-key", values.exn_handler_key)
# FIXME: need stronger guards for all of these
for name in ["prop:evt",
"prop:impersonator-of",
"prop:method-arity-error"]:
expose_val(name, values_struct.W_StructProperty(
values.W_Symbol.make(name), values.w_false))
for name in ["exn:srclocs",
"custom-print-quotable"]:
prop = values_struct.W_StructProperty(values.W_Symbol.make(name), values.w_false)
expose_val("prop:"+name, prop)
expose_val(name+"?", values_struct.W_StructPropertyPredicate(prop))
expose_val(name+"-accessor", values_struct.W_StructPropertyAccessor(prop))
expose_val("prop:authentic", values_struct.w_prop_authentic)
expose_val("prop:sealed", values_struct.w_prop_sealed)
expose_val("prop:object-name", values_struct.w_prop_object_name)
expose_val("prop:procedure", values_struct.w_prop_procedure)
expose_val("prop:checked-procedure", values_struct.w_prop_checked_procedure)
expose_val("prop:arity-string", values_struct.w_prop_arity_string)
expose_val("prop:incomplete-arity", values_struct.w_prop_incomplete_arity)
expose_val("prop:custom-write", values_struct.w_prop_custom_write)
expose_val("prop:equal+hash", values_struct.w_prop_equal_hash)
expose_val("prop:chaperone-unsafe-undefined",
values_struct.w_prop_chaperone_unsafe_undefined)
expose_val("prop:set!-transformer", values_struct.w_prop_set_bang_transformer, only_old=True)
expose_val("prop:rename-transformer", values_struct.w_prop_rename_transformer, only_old=True)
expose_val("prop:expansion-contexts", values_struct.w_prop_expansion_contexts, only_old=True)
expose_val("prop:output-port", values_struct.w_prop_output_port)
expose_val("prop:input-port", values_struct.w_prop_input_port)
@continuation
def check_cont(proc, v, v1, v2, app, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
val = check_one_val(_vals)
if val is not values.w_false:
return v.ref_with_extra_info(1, app, env, cont)
return proc.call([v, v1, v2], env, cont)
@continuation
def receive_first_field(proc, v, v1, v2, app, env, cont, _vals):
from pycket.interpreter import check_one_val
first_field = check_one_val(_vals)
return first_field.call([v1, v2], env,
check_cont(proc, v, v1, v2, app, env, cont))
@expose("checked-procedure-check-and-extract",
[values_struct.W_StructType, values.W_Object, procedure,
values.W_Object, values.W_Object], simple=False, extra_info=True)
@jit.unroll_safe
def do_checked_procedure_check_and_extract(type, v, proc, v1, v2, env, cont, calling_app):
from pycket.interpreter import check_one_val, return_value
if isinstance(v, values_struct.W_RootStruct):
struct_type = jit.promote(v.struct_type())
if type.has_subtype(struct_type):
offset = struct_type.get_offset(type)
assert offset != -1
return v.ref_with_extra_info(offset, calling_app, env,
receive_first_field(proc, v, v1, v2, calling_app, env, cont))
return proc.call([v, v1, v2], env, cont)
################################################################
# printing
@expose("system-library-subpath", [default(values.W_Object, values.w_false)])
def sys_lib_subpath(mode):
# Pycket is 64bit only a.t.m.
if w_system_sym == w_windows_sym:
return values.W_Path(r"win32\\x86_64")
elif w_system_sym == w_macosx_sym:
return values.W_Path("x86_64-macosx")
else:
# FIXME: pretend all unicies are linux for now
return values.W_Path("x86_64-linux")
@expose("primitive-closure?", [values.W_Object])
def prim_clos(v):
return values.w_false
################################################################
# built-in struct types
def define_struct(name, w_super=values.w_null, fields=[]):
immutables = range(len(fields))
symname = values.W_Symbol.make(name)
w_struct_type = values_struct.W_StructType.make_simple(
w_name=symname,
w_super_type=w_super,
init_field_count=len(fields),
auto_field_count=0,
immutables=immutables)
expose_val("struct:" + name, w_struct_type)
expose_val(name, w_struct_type.constructor)
# this is almost always also provided
expose_val("make-" + name, w_struct_type.constructor)
expose_val(name + "?", w_struct_type.predicate)
struct_acc = w_struct_type.accessor
for field, field_name in enumerate(fields):
w_name = values.W_Symbol.make(field_name)
acc = values_struct.W_StructFieldAccessor(struct_acc, field, w_name)
expose_val(name + "-" + field_name, acc)
return w_struct_type
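# E.g. define_struct("srcloc", fields=["source", ...]) below exposes
# struct:srcloc, srcloc, make-srcloc, srcloc?, and one accessor per
# field (srcloc-source, srcloc-line, ...) in the primitive environment.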
exn = \
define_struct("exn", values.w_null, ["message", "continuation-marks"])
exn_fail = \
define_struct("exn:fail", exn)
exn_fail_contract = \
define_struct("exn:fail:contract", exn_fail)
exn_fail_contract_arity = \
define_struct("exn:fail:contract:arity", exn_fail)
exn_fail_contract_divide_by_zero = \
define_struct("exn:fail:contract:divide-by-zero", exn_fail)
exn_fail_contract_non_fixnum_result = \
define_struct("exn:fail:contract:non-fixnum-result", exn_fail)
exn_fail_contract_continuation = \
define_struct("exn:fail:contract:continuation", exn_fail)
exn_fail_contract_variable = \
define_struct("exn:fail:contract:variable", exn_fail, ["id"])
exn_fail_syntax = \
define_struct("exn:fail:syntax", exn_fail, ["exprs"])
exn_fail_syntax_unbound = \
define_struct("exn:fail:syntax:unbound", exn_fail_syntax)
exn_fail_syntax_missing_module = \
define_struct("exn:fail:syntax:missing-module", exn_fail_syntax, ["path"])
exn_fail_read = \
define_struct("exn:fail:read", exn_fail, ["srclocs"])
exn_fail_read_eof = \
define_struct("exn:fail:read:eof", exn_fail_read)
exn_fail_read_non_char = \
define_struct("exn:fail:read:non-char", exn_fail_read)
exn_fail_fs = \
define_struct("exn:fail:filesystem", exn_fail)
exn_fail_fs_exists = \
define_struct("exn:fail:filesystem:exists", exn_fail_fs)
exn_fail_fs_version = \
define_struct("exn:fail:filesystem:version", exn_fail_fs)
exn_fail_fs_errno = \
define_struct("exn:fail:filesystem:errno", exn_fail_fs, ["errno"])
exn_fail_fs_missing_module = \
define_struct("exn:fail:filesystem:missing-module", exn_fail_fs, ["path"])
exn_fail_network = \
define_struct("exn:fail:network", exn_fail)
exn_fail_network_errno = \
define_struct("exn:fail:network:errno", exn_fail_network, ["errno"])
exn_fail_out_of_memory = \
define_struct("exn:fail:out-of-memory", exn_fail)
exn_fail_unsupported = \
define_struct("exn:fail:unsupported", exn_fail)
exn_fail_user = \
define_struct("exn:fail:user", exn_fail)
exn_break = \
define_struct("exn:break", exn)
exn_break_hang_up = \
define_struct("exn:break:hang-up", exn_break)
exn_break_terminate = \
define_struct("exn:break:terminate", exn_break)
srcloc = define_struct("srcloc",
fields=["source", "line", "column", "position", "span"])
date_struct = define_struct("date", fields=["second",
"minute",
"hour",
"day",
"month",
"year",
"week-day",
"year-day",
"dst?"
"time-zone-offset"])
date_star_struct = define_struct("date*", date_struct,
fields=["nanosecond", "time-zone-name"])
arity_at_least = define_struct("arity-at-least", values.w_null, ["value"])
for args in [ ("char-symbolic?",),
("char-graphic?",),
("char-blank?",),
("char-iso-control?",),
("char-punctuation?",),
("char-upper-case?",),
("char-title-case?",),
("char-lower-case?",),
]:
make_dummy_char_pred(*args)
for args in [ ("subprocess?",),
("file-stream-port?",),
("terminal-port?",),
("byte-ready?",),
("char-ready?",),
("handle-evt?",),
("thread?",),
("thread-running?",),
("thread-dead?",),
("semaphore-try-wait?",),
("link-exists?",),
("chaperone-channel",),
("impersonate-channel",),
]:
define_nyi(*args)
@expose("unsafe-make-place-local", [values.W_Object])
def unsafe_make_place_local(v):
return values.W_MBox(v)
@expose("unsafe-place-local-ref", [values.W_MBox], simple=False)
def unsafe_place_local_ref(p, env, cont):
return p.unbox(env, cont)
@expose("unsafe-place-local-set!", [values.W_MBox, values.W_Object], simple=False)
def unsafe_place_local_set(p, v, env, cont):
return p.set_box(v, env, cont)
@expose("set!-transformer?", [values.W_Object], only_old=True)
def set_bang_transformer(v):
if isinstance(v, values.W_AssignmentTransformer):
return values.w_true
elif isinstance(v, values_struct.W_RootStruct):
w_property = v.struct_type().read_property(
values_struct.w_prop_set_bang_transformer)
return values.W_Bool.make(w_property is not None)
else:
return values.w_false
@expose("object-name", [values.W_Object])
def object_name(v):
if isinstance(v, values.W_Prim):
return v.name
elif isinstance(v, values_regex.W_AnyRegexp) or isinstance(v, values.W_Port):
return v.obj_name()
return values_string.W_String.fromstr_utf8(v.tostring()) # XXX really?
@expose("find-main-config", [])
def find_main_config():
return values.w_false
@expose("version", [])
def version():
from pycket.env import w_version
version = w_version.get_version()
if version == '':
version = "old-pycket"
return values_string.W_String.fromascii("unknown version" if version is None else version)
@continuation
def sem_post_cont(sem, env, cont, vals):
sem.post()
from pycket.interpreter import return_multi_vals
return return_multi_vals(vals, env, cont)
@expose("call-with-semaphore", simple=False, extra_info=True)
def call_with_sem(args, env, cont, extra_call_info):
if len(args) < 2:
raise SchemeException("error call-with-semaphore")
sem = args[0]
f = args[1]
if len(args) == 2:
new_args = []
fail = None
else:
new_args = args[3:]
if args[2] is values.w_false:
fail = None
else:
fail = args[2]
assert isinstance(sem, values.W_Semaphore)
assert f.iscallable()
sem.wait()
return f.call_with_extra_info(new_args, env, sem_post_cont(sem, env, cont), extra_call_info)
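# Racket-level sketch: (call-with-semaphore s proc fail extra ...) waits on
# s, applies proc, and re-posts s via sem_post_cont once proc's values return.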
c_thread = values.W_Thread()
@expose("current-thread", [])
def current_thread():
return c_thread
# FIXME : implementation
@expose("current-memory-use", [default(values.W_Object, values.w_false)])
def current_memory_use(mode):
# mode is : (or/c #f 'cumulative custodian?)
return values.W_Fixnum(1)
@expose("semaphore-post", [values.W_Semaphore])
def sem_post(s):
s.post()
@expose("semaphore-wait", [values.W_Semaphore])
def sem_wait(s):
s.wait()
@expose("procedure-rename", [procedure, values.W_Object])
def procedure_rename(p, n):
return p
@expose("procedure->method", [procedure])
def procedure_to_method(proc):
# TODO provide a real implementation
return proc
@jit.unroll_safe
def make_arity_list(arity, extra=None):
jit.promote(arity)
acc = values.w_null
if extra is not None:
acc = values.W_Cons.make(extra, acc)
for item in reversed(arity.arity_list):
i = values.W_Fixnum(item)
acc = values.W_Cons.make(i, acc)
return acc
@continuation
def proc_arity_cont(arity, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
val = check_one_val(_vals)
if not arity.arity_list:
return return_value(val, env, cont)
result = make_arity_list(arity, val)
return return_value(result, env, cont)
def arity_to_value(arity, env, cont):
from pycket.interpreter import return_value
if arity.at_least != -1:
val = [values.W_Fixnum(arity.at_least)]
constructor = arity_at_least.constructor
return constructor.call(val, env, proc_arity_cont(arity, env, cont))
if len(arity.arity_list) == 1:
item = values.W_Fixnum(arity.arity_list[0])
return return_value(item, env, cont)
result = make_arity_list(arity)
return return_value(result, env, cont)
@expose("procedure-arity", [procedure], simple=False)
@jit.unroll_safe
def do_procedure_arity(proc, env, cont):
arity = proc.get_arity()
return arity_to_value(arity, env, cont)
@expose("procedure-arity-mask", [procedure], simple=True)
@jit.unroll_safe
def do_procedure_arity_mask(proc):
arity = proc.get_arity()
return arity.arity_bits()
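# (Bit i of the mask is set iff the procedure accepts exactly i arguments;
#  a negative mask encodes an at-least arity, e.g. -1 accepts any number
#  of arguments, matching Racket's procedure-arity-mask.)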
@make_procedure("default-read-handler",[values.W_InputPort, default(values.W_Object, None)], simple=False)
def default_read_handler(ip, src, env, cont):
# default to the "read" and "read-syntax" defined in the expander linklet
if src is None:
return prim_env[values.W_Symbol.make("read")].call([ip], env, cont)
else:
return prim_env[values.W_Symbol.make("read-syntax")].call([ip, src], env, cont)
@continuation
def get_read_handler_cont(env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
ip = check_one_val(_vals)
assert isinstance(ip, values.W_InputPort)
if ip.get_read_handler():
return return_value(ip.get_read_handler(), env, cont)
else:
return return_value(default_read_handler, env, cont)
@expose("port-read-handler", [values.W_Object, default(values.W_Procedure, None)], simple=False)
def do_port_read_handler(ip, proc, env, cont):
from pycket.interpreter import return_value
if not isinstance(ip, values.W_InputPort):
assert isinstance(ip, values_struct.W_Struct)
st = ip.struct_type()
return st.accessor.call([ip, values.W_Fixnum(0)], env, get_read_handler_cont(env, cont))
if proc is None:
#get
if ip.get_read_handler():
return return_value(ip.get_read_handler(), env, cont)
else:<|fim▁hole|> #set
if proc is default_read_handler:
ip.set_read_handler(default_read_handler)
else:
ip.set_read_handler(proc)
return return_value(values.w_void, env, cont)
@expose("procedure-arity?", [values.W_Object])
@jit.unroll_safe
def do_is_procedure_arity(n):
if isinstance(n, values.W_Fixnum):
return values.W_Bool.make(n.value >= 0)
elif (isinstance(n, values_struct.W_RootStruct) and
n.struct_type() is arity_at_least):
return values.w_true
elif isinstance(n, values.W_List) and n.is_proper_list():
for item in values.from_list_iter(n):
if not (isinstance(item, values.W_Fixnum) or
(isinstance(item, values_struct.W_RootStruct) and
item.struct_type() is arity_at_least)):
return values.w_false
return values.w_true
return values.w_false
@expose("procedure-arity-includes?",
[procedure, values.W_Integer, default(values.W_Object, values.w_false)])
def procedure_arity_includes(proc, k, kw_ok):
if kw_ok is values.w_false and isinstance(proc, values_struct.W_RootStruct):
w_prop_val = proc.struct_type().read_property(values_struct.w_prop_incomplete_arity)
if w_prop_val is not None:
return values.w_false
if isinstance(k, values.W_Integer):
try:
k_val = k.toint()
except OverflowError:
pass
else:
arity = proc.get_arity(promote=True)
return values.W_Bool.make(arity.arity_includes(k_val))
return values.w_false
@expose("procedure-result-arity", [procedure], simple=False)
def procedure_result_arity(proc, env, cont):
from pycket.interpreter import return_multi_vals
arity = proc.get_result_arity()
if arity is None:
return return_multi_vals(values.w_false, env, cont)
return arity_to_value(arity, env, cont)
@expose("procedure-reduce-arity", [procedure, values.W_Object, default(values.W_Object, None)])
def procedure_reduce_arity(proc, arity, e):
# FIXME : this code is all wrong
#assert isinstance(arity, Arity)
#proc.set_arity(arity)
return proc
@expose("procedure-reduce-arity-mask", [procedure, values.W_Fixnum, default(values.W_Object, values.w_false)])
def procedure_reduce_arity_mask(proc, mask, name):
import math
return proc # FIXME: do this without mutation
v = mask.value
# turn the given mask into an arity
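    # e.g. a mask of 4 (only bit 2 set) encodes "exactly 2 arguments", while
    # -4 (all bits >= 2 set in two's complement) encodes "2 or more arguments"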
if v < 0:
# it's an at least value
ar_value = int(math.log(abs(v))/math.log(2))
# for some reason the 2 argument log doesn't exist
ar = Arity([], ar_value)
else:
ar_value = int(math.log(v)/math.log(2))
ar = Arity([ar_value], -1)
# FIXME: what if the mask represents a list? see math_arity_cont
# FIXME: mutation is wrong!
proc.set_arity(ar)
return proc
@expose("procedure-struct-type?", [values_struct.W_StructType])
def do_is_procedure_struct_type(struct_type):
return values.W_Bool.make(struct_type.prop_procedure is not None)
@expose("procedure-extract-target", [procedure], simple=False)
def do_procedure_extract_target(proc, env, cont):
from pycket.interpreter import return_value
if not isinstance(proc, values_struct.W_RootStruct):
return return_value(values.w_false, env, cont)
struct_type = proc.struct_type()
prop_procedure = struct_type.prop_procedure
if isinstance(prop_procedure, values.W_Fixnum):
idx = prop_procedure.value
return struct_type.accessor.access(proc, idx, env, cont)
return return_value(values.w_false, env, cont)
@expose("variable-reference-constant?",
[values.W_VariableReference], simple=False)
def varref_const(varref, env, cont):
from pycket.interpreter import return_value
return return_value(values.W_Bool.make(not(varref.varref.is_mutable(env))),
env, cont)
@expose("variable-reference->resolved-module-path",
[values.W_VariableReference], only_old=True)
def varref_rmp(varref):
return values.W_ResolvedModulePath(values.W_Path(varref.varref.path))
@expose("variable-reference->module-source", [values.W_VariableReference], only_old=True)
def varref_ms(varref):
# FIXME: not implemented
return values.W_Symbol.make("dummy_module")
@expose("variable-reference->module-path-index", [values.W_VariableReference], only_old=True)
def varref_to_mpi(ref):
from pycket.interpreter import ModuleVar
if not isinstance(ref, ModuleVar):
return values.w_false
return values.W_ModulePathIndex()
@expose("variable-reference->module-base-phase", [values.W_VariableReference], only_old=True)
def varref_to_mbp(ref):
# XXX Obviously not correct
return values.W_Fixnum.ZERO
@expose("resolved-module-path-name", [values.W_ResolvedModulePath], only_old=True)
def rmp_name(rmp):
return rmp.name
def is_module_path(v):
if isinstance(v, values.W_Symbol):
# FIXME: not always right
return True
if isinstance(v, values.W_Path):
return True
if isinstance(v, values_string.W_String):
return True
if isinstance(v, values.W_List):
vs = values.from_list(v)
for p in vs:
if not is_module_path(p):
return False
return True
# FIXME
return False
@expose("module-path?", [values.W_Object], only_old=True)
def module_pathp(v):
return values.W_Bool.make(is_module_path(v))
@expose("values")
def do_values(args_w):
return values.Values.make(args_w)
@expose("call-with-values", [procedure] * 2, simple=False, extra_info=True)
def call_with_values (producer, consumer, env, cont, extra_call_info):
# FIXME: check arity
return producer.call_with_extra_info([], env, call_cont(consumer, env, cont), extra_call_info)
@continuation
def time_apply_cont(initial, initial_user, initial_gc, env, cont, vals):
from pycket.interpreter import return_multi_vals
final = time.time()
final_gc = current_gc_time()
final_user = time.clock()
ms = values.W_Fixnum(int((final - initial) * 1000))
ms_gc = values.W_Fixnum(int((final_gc - initial_gc)))
ms_user = values.W_Fixnum(int((final_user - initial_user) * 1000))
vals_w = vals.get_all_values()
results = values.Values.make([values.to_list(vals_w),
ms_user, ms, ms_gc])
return return_multi_vals(results, env, cont)
@jit.dont_look_inside
def current_gc_time():
if objectmodel.we_are_translated():
memory = rgc.get_stats(rgc.TOTAL_GC_TIME)
else:
memory = 0
return memory
@expose("time-apply", [procedure, values.W_List], simple=False, extra_info=True)
def time_apply(a, args, env, cont, extra_call_info):
initial = time.time()
initial_user = time.clock()
initial_gc = current_gc_time()
return a.call_with_extra_info(values.from_list(args),
env, time_apply_cont(initial, initial_user, initial_gc, env, cont),
extra_call_info)
@expose("apply", simple=False, extra_info=True)
def apply(args, env, cont, extra_call_info):
if len(args) < 2:
raise SchemeException("apply expected at least 2 arguments, given %s" % len(args))
fn = args[0]
if not fn.iscallable():
raise SchemeException("apply expected a procedure, got something else")
lst = args[-1]
try:
fn_arity = fn.get_arity(promote=True)
if fn_arity is Arity.unknown or fn_arity.at_least == -1:
unroll_to = 3
elif fn_arity.arity_list:
unroll_to = fn_arity.arity_list[-1]
else:
unroll_to = fn_arity.at_least + 7
rest = values.from_list(lst, unroll_to=unroll_to, force=True)
except SchemeException:
raise SchemeException(
"apply expected a list as the last argument, got something else")
args_len = len(args) - 1
assert args_len >= 0
others = args[1:args_len]
new_args = others + rest
return fn.call_with_extra_info(new_args, env, cont, extra_call_info)
@expose("make-semaphore", [default(values.W_Fixnum, values.W_Fixnum.ZERO)])
def make_semaphore(n):
return values.W_Semaphore(n.value)
@expose("semaphore-peek-evt", [values.W_Semaphore])
def sem_peek_evt(s):
return values.W_SemaphorePeekEvt(s)
@expose("not", [values.W_Object])
def notp(a):
return values.W_Bool.make(a is values.w_false)
@jit.elidable
def elidable_length(lst):
n = 0
while isinstance(lst, values.W_Cons):
n += 1
lst = lst.cdr()
return n
@objectmodel.always_inline
def unroll_pred(lst, idx, unroll_to=0):
if not jit.we_are_jitted():
return False
return not jit.isvirtual(lst) and idx > unroll_to
@jit.unroll_safe
def virtual_length(lst, unroll_to=0):
n = 0
while isinstance(lst, values.W_Cons):
if unroll_pred(lst, n, unroll_to):
return elidable_length(lst) + n
n += 1
lst = lst.cdr()
return n
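# The split above is a JIT idiom: while the list is virtual (still being built
# inside the current trace) virtual_length walks it unrolled; once unroll_pred
# sees a non-virtual cons it delegates to elidable_length, which @jit.elidable
# lets the JIT treat as a pure function of its argument and fold repeated calls.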
@expose("length", [values.W_List])
def length(a):
if not a.is_proper_list():
raise SchemeException("length: not given a proper list (either cyclic or not null terminated)")
return values.W_Fixnum(virtual_length(a, unroll_to=2))
@expose("list")
def do_list(args):
return values.to_list(args)
@expose("list*")
def do_liststar(args):
if not args:
raise SchemeException("list* expects at least one argument")
return values.to_improper(args[:-1], args[-1])
@expose("assq", [values.W_Object, values.W_List])
def assq(a, b):
while isinstance(b, values.W_Cons):
head, b = b.car(), b.cdr()
if not isinstance(head, values.W_Cons):
raise SchemeException("assq: found a non-pair element")
if eq_prims.eqp_logic(a, head.car()):
return head
if b is not values.w_null:
raise SchemeException("assq: reached a non-pair")
return values.w_false
@expose("memq", [values.W_Object, values.W_List])
def memq(w_o, w_l):
while isinstance(w_l, values.W_Cons):
if eq_prims.eqp_logic(w_o, w_l.car()):
return w_l
w_l = w_l.cdr()
return values.w_false
@expose("memv", [values.W_Object, values.W_List])
def memv(w_o, w_l):
while isinstance(w_l, values.W_Cons):
if w_o.eqv(w_l.car()):
return w_l
w_l = w_l.cdr()
return values.w_false
@expose("cons", [values.W_Object, values.W_Object])
def do_cons(a, b):
return values.W_Cons.make(a, b)
def make_list_eater(name):
"""
For generating car, cdr, caar, cadr, etc...
"""
spec = name[1:-1]
unrolled = unroll.unrolling_iterable(reversed(spec))
contract = "pair?"
for letter in spec[1::-1]:
if letter == 'a':
contract = "(cons/c %s any/c)" % contract
elif letter == 'd':
contract = "(cons/c any/c %s)" % contract
else:
assert False, "Bad list eater specification"
@expose(name, [values.W_Object])
def process_list(_lst):
lst = _lst
for letter in unrolled:
if not isinstance(lst, values.W_Cons):
raise SchemeException("%s: expected %s given %s" % (name, contract, _lst))
if letter == 'a':
lst = lst.car()
elif letter == 'd':
lst = lst.cdr()
else:
assert False, "Bad list eater specification"
return lst
process_list.__name__ = "do_" + name
return process_list
def list_eater_names(n):
names = []
for i in range(n):
names = [n + 'a' for n in names] + [n + 'd' for n in names] + ['a', 'd']
return ["c%sr" % name for name in names]
for name in list_eater_names(4):
make_list_eater(name)
@expose("mlist")
def do_mlist(args):
return values.to_mlist(args)
@expose("mcons", [values.W_Object, values.W_Object])
def do_mcons(a, b):
return values.W_MCons(a,b)
@expose("mcar", [values.W_MCons])
def do_mcar(a):
return a.car()
@expose("mcdr", [values.W_MCons])
def do_mcdr(a):
return a.cdr()
@expose("set-mcar!", [values.W_MCons, values.W_Object])
def do_set_mcar(a, b):
a.set_car(b)
@expose("set-mcdr!", [values.W_MCons, values.W_Object])
def do_set_mcdr(a, b):
a.set_cdr(b)
@expose("map", simple=False, arity=Arity.geq(2))
def do_map(args, env, cont):
# XXX this is currently not properly jitted
if len(args) < 2:
raise SchemeException("map expected at least two argument, got %s"%len(args))
fn, lists = args[0], args[1:]
if not fn.iscallable():
raise SchemeException("map expected a procedure, got something else")
# FIXME: more errorchecking
assert len(args) >= 0
return map_loop(fn, lists, env, cont)
@loop_label
def map_loop(f, lists, env, cont):
from pycket.interpreter import return_value
lists_new = []
args = []
for l in lists:
if not isinstance(l, values.W_Cons):
if l is not values.w_null:
raise SchemeException("map: not given a proper list")
return return_value(values.w_null, env, cont)
args.append(l.car())
lists_new.append(l.cdr())
return f.call(args, env, map_first_cont(f, lists_new, env, cont))
@continuation
def map_first_cont(f, lists, env, cont, _vals):
from pycket.interpreter import check_one_val
val = check_one_val(_vals)
return map_loop(f, lists, env, map_cons_cont(f, lists, val, env, cont))
@continuation
def map_cons_cont(f, lists, val, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
rest = check_one_val(_vals)
return return_value(values.W_Cons.make(val, rest), env, cont)
@expose("for-each", simple=False, arity=Arity.geq(2))
@jit.unroll_safe
def for_each(args, env, cont):
from pycket.interpreter import return_value
if len(args) < 2:
raise SchemeException("for-each: expected at least a procedure and a list")
f = args[0]
if not f.iscallable():
raise SchemeException("for-each: expected a procedure, but got %s" % f)
ls = args[1:]
for l in ls:
if not l.is_proper_list():
raise SchemeException("for-each: expected a list, but got %s" % l)
return for_each_loop(f, ls, env, cont)
@loop_label
@jit.unroll_safe
def for_each_loop(func, args, env, cont):
from pycket.interpreter import return_value
nargs = jit.promote(len(args))
heads = [None] * nargs
tails = [None] * nargs
for i in range(nargs):
arg = args[i]
if arg is values.w_null:
for v in args:
if v is not values.w_null:
raise SchemeException("for-each: all lists must have same size")
return return_value(values.w_void, env, cont)
assert isinstance(arg, values.W_Cons)
heads[i] = arg.car()
tails[i] = arg.cdr()
return func.call(heads, env,
for_each_cont(func, tails, env, cont))
@continuation
def for_each_cont(func, tails, env, cont, _vals):
return for_each_loop(func, tails, env, cont)
@expose("andmap", simple=False, arity=Arity.geq(2))
def andmap(args, env, cont):
from pycket.interpreter import return_value
if len(args) < 2:
raise SchemeException("andmap: expected at least a procedure and a list")
f = args[0]
if not f.iscallable():
raise SchemeException("andmap: expected a procedure, but got %s"%f)
ls = args[1:]
for l in ls:
if not isinstance(l, values.W_List):
raise SchemeException("andmap: expected a list, but got %s"%l)
return return_value(values.w_void, env, andmap_cont(f, ls, env, cont))
@continuation
def andmap_cont(f, ls, env, cont, vals):
# XXX this is currently not properly jitted
from pycket.interpreter import return_value, check_one_val
val = check_one_val(vals)
if val is values.w_false:
return return_value(val, env, cont)
for l in ls:
if l is values.w_null:
return return_value(values.w_true, env, cont)
cars = [l.car() for l in ls]
cdrs = [l.cdr() for l in ls]
return f.call(cars, env, andmap_cont(f, cdrs, env, cont))
@expose("ormap", simple=False, arity=Arity.geq(2))
def ormap(args, env, cont):
from pycket.interpreter import return_value
if len(args) < 2:
raise SchemeException("ormap: expected at least a procedure and a list")
f = args[0]
if not f.iscallable():
raise SchemeException("ormap: expected a procedure, but got %s"%f)
ls = args[1:]
for l in ls:
if not isinstance(l, values.W_List):
raise SchemeException("ormap: expected a list, but got %s"%l)
return return_value(values.w_false, env, ormap_cont(f, ls, env, cont))
@continuation
def ormap_cont(f, ls, env, cont, vals):
# XXX this is currently not properly jitted
from pycket.interpreter import return_value, check_one_val
val = check_one_val(vals)
if val is not values.w_false:
return return_value(val, env, cont)
for l in ls:
if l is values.w_null:
return return_value(values.w_false, env, cont)
cars = [l.car() for l in ls]
cdrs = [l.cdr() for l in ls]
return f.call(cars, env, ormap_cont(f, cdrs, env, cont))
@expose("append", arity=Arity.geq(0))
@jit.look_inside_iff(
lambda l: jit.loop_unrolling_heuristic(l, len(l), values.UNROLLING_CUTOFF))
def append(lists):
if not lists:
return values.w_null
acc = lists[-1]
for i in range(len(lists) - 2, -1, -1):
curr = lists[i]
if not curr.is_proper_list():
raise SchemeException("append: expected proper list")
acc = append_two(curr, acc)
return acc
def append_two(l1, l2):
first = None
last = None
while isinstance(l1, values.W_Cons):
v = l1.clone()
if first is None:
first = v
else:
last._unsafe_set_cdr(v)
last = v
l1 = l1.cdr()
if last is None:
return l2
last._unsafe_set_cdr(l2)
return first
@expose("reverse", [values.W_List])
def reverse(w_l):
acc = values.w_null
while isinstance(w_l, values.W_Cons):
val, w_l = w_l.car(), w_l.cdr()
acc = values.W_Cons.make(val, acc)
if w_l is not values.w_null:
raise SchemeException("reverse: not given proper list")
return acc
@expose("void", arity=Arity.geq(0))
def do_void(args):
return values.w_void
@expose("make-ephemeron", [values.W_Object] * 2)
def make_ephemeron(key, val):
return values.W_Ephemeron(key, val)
@expose("ephemeron-value",
[values.W_Ephemeron, default(values.W_Object, values.w_false)])
def ephemeron_value(ephemeron, default):
v = ephemeron.get()
return v if v is not None else default
@expose("make-placeholder", [values.W_Object])
def make_placeholder(val):
return values.W_Placeholder(val)
@expose("placeholder-set!", [values.W_Placeholder, values.W_Object])
def placeholder_set(ph, datum):
ph.value = datum
return values.w_void
@expose("placeholder-get", [values.W_Placeholder])
def placeholder_get(ph):
return ph.value
@expose("make-hash-placeholder", [values.W_List])
def make_hash_placeholder(vals):
return values.W_HashTablePlaceholder([], [])
@expose("make-hasheq-placeholder", [values.W_List])
def make_hasheq_placeholder(vals):
return values.W_HashTablePlaceholder([], [])
@expose("make-hasheqv-placeholder", [values.W_List])
def make_hasheqv_placeholder(vals):
return values.W_HashTablePlaceholder([], [])
@expose("list?", [values.W_Object])
def listp(v):
return values.W_Bool.make(v.is_proper_list())
@expose("list-pair?", [values.W_Object])
def list_pair(v):
return values.W_Bool.make(isinstance(v, values.W_Cons) and v.is_proper_list())
def enter_list_ref_iff(lst, pos):
if jit.isconstant(lst) and jit.isconstant(pos):
return True
return jit.isconstant(pos) and pos <= 16
@jit.look_inside_iff(enter_list_ref_iff)
def list_ref_impl(lst, pos):
if pos < 0:
raise SchemeException("list-ref: negative index")
for i in range(pos):
lst = lst.cdr()
if not isinstance(lst, values.W_Cons):
raise SchemeException("list-ref: index out of range")
return lst.car()
@expose("list-ref", [values.W_Cons, values.W_Fixnum])
def list_ref(lst, pos):
return list_ref_impl(lst, pos.value)
@expose("unsafe-list-ref", [subclass_unsafe(values.W_Cons), values.W_Fixnum])
def unsafe_list_ref(lst, pos):
return list_ref_impl(lst, pos.value)
@expose("unsafe-list-tail", [subclass_unsafe(values.W_Object), values.W_Fixnum])
def unsafe_list_tail(lst, pos):
return list_tail_impl(lst, pos)
@expose("list-tail", [values.W_Object, values.W_Fixnum])
def list_tail(lst, pos):
return list_tail_impl(lst, pos)
def list_tail_impl(lst, pos):
start_pos = pos.value
while start_pos > 0:
if not isinstance(lst, values.W_Cons):
msg = "index too large for list" if lst is values.w_null else "index reaches a non-pair"
raise SchemeException("list-tail : %s\n -- lst : %s\n -- index : %s\n" % (msg, lst.tostring(), start_pos))
lst = lst.cdr()
start_pos -= 1
return lst
@expose("assoc", [values.W_Object, values.W_List, default(values.W_Object, values.w_false)])
def assoc(v, lst, is_equal):
if is_equal is not values.w_false:
raise SchemeException("assoc: using a custom equal? is not yet implemented")
while isinstance(lst, values.W_Cons):
c = lst.car()
        if not isinstance(c, values.W_Cons):
raise SchemeException("assoc: non-pair found in list: %s in %s" % (c.tostring(), lst.tostring()))
cc = c.car()
if v.equal(cc):
return c
lst = lst.cdr()
return values.w_false
@expose("current-seconds", [])
def current_seconds():
tick = int(time.time())
return values.W_Fixnum(tick)
@expose("current-inexact-milliseconds", [])
def curr_millis():
return values.W_Flonum(time.time() * 1000.0)
@expose("seconds->date", [values.W_Fixnum])
def seconds_to_date(s):
# TODO: Proper implementation
return values.w_false
def _error(args, is_user=False):
reason = ""
if len(args) == 1:
sym = args[0]
reason = "error: %s" % sym.tostring()
else:
first_arg = args[0]
if isinstance(first_arg, values_string.W_String):
from rpython.rlib.rstring import StringBuilder
msg = StringBuilder()
msg.append(first_arg.tostring())
v = args[1:]
for item in v:
msg.append(" %s" % item.tostring())
reason = msg.build()
else:
src = first_arg
form = args[1]
v = args[2:]
assert isinstance(src, values.W_Symbol)
assert isinstance(form, values_string.W_String)
reason = "%s: %s" % (
src.tostring(), input_output.format(form, v, "error"))
if is_user:
raise UserException(reason)
else:
raise SchemeException(reason)
@expose("error", arity=Arity.geq(1))
def error(args):
return _error(args, False)
@expose("raise-user-error", arity=Arity.geq(1))
def raise_user_error(args):
return _error(args, True)
@expose("raise-arity-error", arity=Arity.geq(2))
def raise_arity_error(args):
return _error(args, False)
@expose("raise-result-arity-error", arity=Arity.geq(3))
def raise_result_arity_error(args):
return _error(args, False)
@expose("list->vector", [values.W_List])
def list2vector(l):
return values_vector.W_Vector.fromelements(values.from_list(l))
# FIXME: make this work with chaperones/impersonators
@expose("vector->list", [values.W_MVector], simple=False)
def vector2list(v, env, cont):
from pycket.interpreter import return_value
if isinstance(v, values_vector.W_Vector):
# Fast path for unproxied vectors
result = values.vector_to_improper(v, values.w_null)
return return_value(result, env, cont)
return vector_to_list_loop(v, v.length() - 1, values.w_null, env, cont)
@loop_label
def vector_to_list_loop(vector, idx, acc, env, cont):
from pycket.interpreter import return_value
if idx < 0:
return return_value(acc, env, cont)
return vector.vector_ref(idx, env,
vector_to_list_read_cont(vector, idx, acc, env, cont))
@continuation
def vector_to_list_read_cont(vector, idx, acc, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
val = check_one_val(_vals)
acc = values.W_Cons.make(val, acc)
return vector_to_list_loop(vector, idx - 1, acc, env, cont)
# Unsafe pair ops
@expose("unsafe-car", [subclass_unsafe(values.W_Cons)])
def unsafe_car(p):
return p.car()
@expose("unsafe-mcar", [subclass_unsafe(values.W_MCons)])
def unsafe_mcar(p):
return p.car()
@expose("unsafe-cdr", [subclass_unsafe(values.W_Cons)])
def unsafe_cdr(p):
return p.cdr()
@expose("unsafe-mcdr", [subclass_unsafe(values.W_MCons)])
def unsafe_mcdr(p):
return p.cdr()
@continuation
def struct_port_loc_cont(input_huh, env, cont, _vals):
from pycket.interpreter import check_one_val, return_multi_vals
pr = check_one_val(_vals)
if not isinstance(pr, values.W_Port):
if input_huh:
# empty string input port is used for prop:input-port
pr = values.W_StringInputPort("")
else:
# a port that discards all data is used for prop:output-port
pr = values.W_StringOutputPort()
assert isinstance(pr, values.W_Port)
lin = pr.get_line()
col = pr.get_column()
pos = pr.get_position()
return return_multi_vals(values.Values.make([lin, col, pos]), env, cont)
@expose("port-next-location", [values.W_Object], simple=False)
def port_next_loc(p, env, cont):
from pycket.interpreter import return_multi_vals
lin = col = pos = values.w_false
if isinstance(p, values_struct.W_Struct):
i, o = struct_port_prop_huh(p)
if (i is None) and (o is None):
raise SchemeException("given struct doesn't have neither prop:input-port nor prop:output-port")
if i:
if isinstance(i, values.W_InputPort):
lin = i.get_line()
col = i.get_column()
pos = i.get_position()
elif isinstance(i, values.W_Fixnum):
port_index = i.value
return p.struct_type().accessor.call([p, values.W_Fixnum(port_index)], env, struct_port_loc_cont(True, env, cont))
else:
raise SchemeException("invalid value %s for prop:input-port of the given struct : %s" % (i, p.tostring()))
elif o:
if isinstance(o, values.W_OutputPort):
lin = o.get_line()
col = o.get_column()
pos = o.get_position()
elif isinstance(o, values.W_Fixnum):
port_index = o.value
return p.struct_type().accessor.call([p, values.W_Fixnum(port_index)], env, struct_port_loc_cont(False, env, cont))
else:
raise SchemeException("invalid value %s for prop:output-port of the given struct : %s" % (o, p.tostring()))
else:
assert isinstance(p, values.W_Port)
lin = p.get_line()
col = p.get_column()
pos = p.get_position()
return return_multi_vals(values.Values.make([lin, col, pos]), env, cont)
@expose("port-writes-special?", [values.W_Object])
def port_writes_special(v):
return values.w_false
@expose("port-writes-atomic?", [values.W_Object])
def port_writes_atomic(v):
return values.w_false
@expose("port-provides-progress-evts?", [values.W_Object])
def port_ppe(v):
return values.w_false
@expose("file-position*", [values.W_Object])
def file_pos_star(v):
return values.w_false
@expose("symbol-unreadable?", [values.W_Symbol])
def sym_unreadable(v):
if v.unreadable:
return values.w_true
return values.w_false
@expose("symbol-interned?", [values.W_Symbol])
def symbol_interned(v):
return values.W_Bool.make(v.is_interned())
@expose("symbol<?", arity=Arity.geq(1))
def symbol_lt(args):
name = "symbol<?"
if len(args) < 2:
raise SchemeException(name + ": requires at least 2 arguments")
head = args[0]
if not isinstance(head, values.W_Symbol):
raise SchemeException(name + ": not given a string")
for i in range(1, len(args)):
t = args[i]
if not isinstance(t, values.W_Symbol):
raise SchemeException(name + ": not given a string")
# FIXME: shouldn't need to convert to W_String
# but this is much easier than recreating the logic
if string.symbol_to_string_impl(head).cmp(string.symbol_to_string_impl(t)) >= 0:
return values.w_false
head = t
return values.w_true
@expose("immutable?", [values.W_Object])
def immutable(v):
return values.W_Bool.make(v.immutable())
@expose("make-thread-cell",
[values.W_Object, default(values.W_Bool, values.w_false)])
def make_thread_cell(v, pres):
return values.W_ThreadCell(v, False if pres is values.w_false else True)
@expose("thread-cell-ref", [values.W_ThreadCell])
def thread_cell_ref(cell):
return cell.value
@expose("thread-cell-set!", [values.W_ThreadCell, values.W_Object])
def thread_cell_set(cell, v):
cell.value = v
return values.w_void
@expose("current-preserved-thread-cell-values",
[default(values.W_ThreadCellValues, None)])
def current_preserved_thread_cell_values(v):
# Generate a new thread-cell-values object
if v is None:
return values.W_ThreadCellValues()
# Otherwise, we restore the values
for cell, val in v.assoc.iteritems():
assert cell.preserved
cell.value = val
return values.w_void
@expose("place-enabled?")
def do_is_place_enabled(args):
return values.w_false
@expose("gensym", [default(values.W_Object, values.W_Symbol.make("g"))])
def gensym(init):
from pycket.interpreter import Gensym
if not isinstance(init, values.W_Symbol) and not isinstance(init, values_string.W_String):
raise SchemeException("gensym exptected a string or symbol but got : %s" % init.tostring())
gensym_key = init.tostring()
return Gensym.gensym(gensym_key)
@expose("keyword<?", [values.W_Keyword, values.W_Keyword])
def keyword_less_than(a_keyword, b_keyword):
return values.W_Bool.make(a_keyword.value < b_keyword.value)
initial_env_vars = values.W_EnvVarSet({}, True)
expose_val("current-environment-variables", values_parameter.W_Parameter(initial_env_vars))
@expose("environment-variables-ref", [values.W_EnvVarSet, values.W_Bytes])
def env_var_ref(set, name):
r = set.get(name.as_str())
if r is None:
return values.w_false
else:
return values.W_Bytes.from_string(r)
@expose("environment-variables-set!", [values.W_EnvVarSet, values.W_Bytes, values.W_Bytes, default(values.W_Object, None)])
def env_var_set(set, name, val, fail):
return set.set(name.as_str(), val.as_str())
@expose("make-environment-variables")
def make_env_var(args):
return values.W_EnvVarSet({}, False)
@expose("environment-variables-names", [values.W_EnvVarSet])
def env_var_names(set):
names = set.get_names()
return values.to_list([values.W_Bytes.from_string(n) for n in names])
@expose("check-for-break", [])
def check_for_break():
return values.w_false
@expose("find-system-path", [values.W_Symbol], simple=True)
def find_sys_path(kind):
return racket_sys_paths.get_path(kind)
@expose("find-main-collects", [])
def find_main_collects():
return values.w_false
@expose("module-path-index-join",
[values.W_Object, values.W_Object, default(values.W_Object, None)], only_old=True)
def mpi_join(a, b, c):
return values.W_ModulePathIndex()
@expose("module-path-index-resolve",
[values.W_ModulePathIndex], only_old=True)
def mpi_resolve(a):
return values.W_ResolvedModulePath(values.W_Path("."))
# Loading
# FIXME: Proper semantics.
@expose("load", [values_string.W_String], simple=False, only_old=True)
def load(lib, env, cont):
from pycket.expand import ensure_json_ast_run
lib_name = lib.tostring()
json_ast = ensure_json_ast_run(lib_name)
if json_ast is None:
raise SchemeException(
"can't gernerate load-file for %s " % lib.tostring())
#ast = load_json_ast_rpython(json_ast)
raise NotImplementedError(
"would crash anyway when trying to interpret the Module")
#return ast, env, cont
expose_val("current-load-relative-directory", values_parameter.W_Parameter(values.w_false))
expose_val("current-write-relative-directory", values_parameter.W_Parameter(values.w_false))
initial_security_guard = values.W_SecurityGuard()
expose_val("current-security-guard", values_parameter.W_Parameter(initial_security_guard))
@expose("make-security-guard", [values.W_SecurityGuard, values.W_Procedure, values.W_Procedure, default(values.W_Procedure, values.w_false)])
def make_security_guard(parent, file, network, link):
return values.W_SecurityGuard()
@expose("unsafe-make-security-guard-at-root")
def unsafe_make_sec_guard(args):
return values.W_SecurityGuard()
@make_procedure("current-directory-guard", [values.W_Object], simple=False)
def current_directory_guard(path, env, cont):
from pycket.interpreter import return_value
# "cd"s at the os level
if not (isinstance(path, values_string.W_String) or isinstance(path, values.W_Path)):
raise SchemeException("current-directory: exptected a path-string? as argument 0, but got : %s" % path.tostring())
path_str = input_output.extract_path(path)
# if path is a complete-path?, set it
if path_str[0] == os.path.sep:
new_current_dir = path_str
else: # relative to the current one
current_dir = current_directory_param.get(cont)
current_path_str = input_output.extract_path(current_dir)
# let's hope that there's no symbolic links etc.
new_current_dir = os.path.normpath(os.path.sep.join([current_path_str, path_str]))
try:
os.chdir(new_current_dir)
except OSError:
raise SchemeException("path doesn't exist : %s" % path_str)
out_port = input_output.current_out_param.get(cont)
assert isinstance(out_port, values.W_OutputPort)
out_port.write("; now in %s\n" % new_current_dir)
return return_value(values.W_Path(new_current_dir), env, cont)
current_directory_param = values_parameter.W_Parameter(values.W_Path(os.getcwd()), current_directory_guard)
expose_val("current-directory", current_directory_param)
w_unix_sym = values.W_Symbol.make("unix")
w_windows_sym = values.W_Symbol.make("windows")
w_macosx_sym = values.W_Symbol.make("macosx")
_platform = sys.platform
def detect_platform():
if _platform == "darwin":
return w_macosx_sym
elif _platform in ['win32', 'cygwin']:
return w_windows_sym
else:
return w_unix_sym
w_system_sym = detect_platform()
w_os_sym = values.W_Symbol.make("os")
w_os_so_suffix = values.W_Symbol.make("so-suffix")
w_os_so_mode_sym = values.W_Symbol.make("so-mode")
w_fs_change_mode = values.W_Symbol.make("fs-change")
w_local_mode = values.W_Symbol.make("local")
w_unix_so_suffix = values.W_Bytes.from_string(".so")
w_word_sym = values.W_Symbol.make("word")
w_link_sym = values.W_Symbol.make("link")
w_vm_sym = values.W_Symbol.make("vm")
w_gc_sym = values.W_Symbol.make("gc")
w_machine_sym = values.W_Symbol.make("machine")
w_cross_sym = values.W_Symbol.make("cross")
w_fs_supported = values.W_Symbol.make("supported")
w_fs_scalable = values.W_Symbol.make("scalable")
w_fs_low_latency = values.W_Symbol.make("low-latency")
w_fs_file_level = values.W_Symbol.make("file-level")
w_target_machine_sym = values.W_Symbol.make("target-machine")
def system_type(w_what):
# os
if w_what is w_os_sym:
return w_system_sym
# word
if w_what is w_word_sym:
#return values.W_Fixnum(8*struct.calcsize("P"))
return values.W_Fixnum(64)
# vm
if w_what is w_vm_sym:
return values.W_Symbol.make("pycket")
# gc
if w_what is w_gc_sym:
return values.W_Symbol.make("3m") # ??
# link
#
# 'static (Unix)
# 'shared (Unix)
# 'dll (Windows)
# 'framework (Mac OS)
if w_what is w_link_sym:
return values.W_Symbol.make("static")
# machine
if w_what is w_machine_sym:
return values_string.W_String.make("further details about the current machine in a platform-specific format")
# so-suffix
if w_what is w_os_so_suffix:
return w_unix_so_suffix
# so-mode
if w_what is w_os_so_mode_sym:
return w_local_mode
# fs-change
if w_what is w_fs_change_mode:
from pycket.prims.vector import vector
w_f = values.w_false
# FIXME: Is there a way to get this info from sys or os?
if w_system_sym is w_unix_sym:
return vector([w_fs_supported, w_fs_scalable, w_f, w_fs_file_level])
else:
return vector([w_f, w_f, w_f, w_f])
# cross
if w_what is w_cross_sym:
return values.W_Symbol.make("infer")
# cross
if w_what is w_target_machine_sym:
return values.W_Symbol.make("pycket")
raise SchemeException("unexpected system-type symbol '%s" % w_what.utf8value)
expose("system-type", [default(values.W_Symbol, w_os_sym)])(system_type)
def system_path_convention_type():
if w_system_sym is w_windows_sym:
return w_windows_sym
else:
return w_unix_sym
expose("system-path-convention-type", [])(system_path_convention_type)
@expose("bytes->path", [values.W_Bytes, default(values.W_Symbol, system_path_convention_type())])
def bytes_to_path(bstr, typ):
# FIXME : ignores the type, won't work for windows
return values.W_Path(bstr.as_str())
major_gc_sym = values.W_Symbol.make("major")
minor_gc_sym = values.W_Symbol.make("minor")
incremental_gc_sym = values.W_Symbol.make("incremental")
@expose("collect-garbage", [default(values.W_Symbol, major_gc_sym)])
@jit.dont_look_inside
def do_collect_garbage(request):
from rpython.rlib import rgc
rgc.collect()
return values.w_void
@continuation
def vec2val_cont(vals, vec, n, s, l, env, cont, new_vals):
from pycket.interpreter import return_multi_vals, check_one_val
new = check_one_val(new_vals)
vals[n] = new
if s+n+1 == l:
return return_multi_vals(values.Values.make(vals), env, cont)
else:
return vec.vector_ref(s+n+1, env, vec2val_cont(vals, vec, n+1, s, l, env, cont))
@expose("vector->values", [values_vector.W_Vector,
default(values.W_Fixnum, values.W_Fixnum.ZERO),
default(values.W_Fixnum, None)],
simple=False)
def vector_to_values(v, start, end, env, cont):
from pycket.interpreter import return_multi_vals
l = end.value if end else v.length()
s = start.value
if s == l:
return return_multi_vals(values.Values.make([]), env, cont)
else:
vals = [None] * (l - s)
return v.vector_ref(s, env, vec2val_cont(vals, v, 0, s, l, env, cont))
class ReaderGraphBuilder(object):
def __init__(self):
self.state = {}
def reader_graph_loop_cons(self, v):
assert isinstance(v, values.W_Cons)
p = values.W_WrappedConsMaybe(values.w_unsafe_undefined, values.w_unsafe_undefined)
self.state[v] = p
car = self.reader_graph_loop(v.car())
cdr = self.reader_graph_loop(v.cdr())
p._car = car
p._cdr = cdr
# FIXME: should change this to say if it's a proper list now ...
return p
def reader_graph_loop_vector(self, v):
assert isinstance(v, values_vector.W_Vector)
len = v.length()
p = values_vector.W_Vector.fromelement(values.w_false, len)
self.state[v] = p
for i in range(len):
vi = v.ref(i)
p.set(i, self.reader_graph_loop(vi))
return p
def reader_graph_loop_struct(self, v):
assert isinstance(v, values_struct.W_Struct)
type = v.struct_type()
if not type.isprefab:
return v
size = v._get_size_list()
p = values_struct.W_Struct.make_n(size, type)
self.state[v] = p
for i in range(size):
val = self.reader_graph_loop(v._ref(i))
p._set_list(i, val)
return p
def reader_graph_loop_proxy(self, v):
assert v.is_proxy()
inner = self.reader_graph_loop(v.get_proxied())
p = v.replace_proxied(inner)
self.state[v] = p
return p
def reader_graph_loop_equal_hash(self, v):
from pycket.hash.equal import W_EqualHashTable
assert isinstance(v, W_EqualHashTable)
empty = v.make_empty()
self.state[v] = empty
for key, val in v.hash_items():
key = self.reader_graph_loop(key)
val = self.reader_graph_loop(val)
empty._set(key, val)
return empty
def reader_graph_loop(self, v):
assert v is not None
from pycket.hash.equal import W_EqualHashTable
if v in self.state:
return self.state[v]
if v.is_proxy():
return self.reader_graph_loop_proxy(v)
if isinstance(v, values.W_Cons):
return self.reader_graph_loop_cons(v)
if isinstance(v, values_vector.W_Vector):
return self.reader_graph_loop_vector(v)
if isinstance(v, values_struct.W_Struct):
return self.reader_graph_loop_struct(v)
if isinstance(v, W_EqualHashTable):
return self.reader_graph_loop_equal_hash(v)
if isinstance(v, values.W_Placeholder):
return self.reader_graph_loop(v.value)
# XXX FIXME: doesn't handle stuff
return v
@expose("make-reader-graph", [values.W_Object])
@jit.dont_look_inside
def make_reader_graph(v):
from rpython.rlib.nonconst import NonConstant
builder = ReaderGraphBuilder()
if NonConstant(False):
# XXX JIT seems be generating questionable code when the argument of
# make-reader-graph is a virtual cons cell. The car and cdr fields get
# set by the generated code after the call, causing reader_graph_loop to
    # crash. I suspect the problem has to do with the translator's effect analysis.
# Example:
# p29 = new_with_vtable(descr=<SizeDescr 24>)
# p31 = call_r(ConstClass(make_reader_graph), p29, descr=<Callr 8 r EF=5>)
# setfield_gc(p29, p15, descr=<FieldP pycket.values.W_WrappedCons.inst__car 8 pure>)
# setfield_gc(p29, ConstPtr(ptr32), descr=<FieldP pycket.values.W_WrappedCons.inst__cdr 16 pure>)
if isinstance(v, values.W_WrappedCons):
print v._car.tostring()
print v._cdr.tostring()
return builder.reader_graph_loop(v)
@expose("procedure-specialize", [procedure])
def procedure_specialize(proc):
from pycket.ast_visitor import copy_ast
# XXX This is the identity function simply for compatibility.
# Another option is to wrap closures in a W_PromotableClosure, which might
# get us a similar effect from the RPython JIT.
if not isinstance(proc, values.W_Closure1AsEnv):
return proc
code = copy_ast(proc.caselam)
vals = proc._get_full_list()
new_closure = values.W_Closure1AsEnv.make(vals, code, proc._prev)
return proc
@expose("processor-count", [])
def processor_count():
return values.W_Fixnum.ONE
cached_values = {}
@continuation
def thunk_cont(index, env, cont, _vals):
from pycket.interpreter import check_one_val, return_value
val = check_one_val(_vals)
cached_values[index] = val
return return_value(val, env, cont)
@expose("cache-configuration", [values.W_Fixnum, values.W_Object], simple=False)
def cache_configuration(index, proc, env, cont):
from pycket.interpreter import return_value
if index in cached_values:
return return_value(cached_values[index], env, cont)
return proc.call([], env, thunk_cont(index, env, cont))
@expose("make-readtable", [values.W_Object, values.W_Character, values.W_Symbol, procedure], only_old=True)
def make_readtable(parent, char, sym, proc):
print "making readtable", [parent, char, sym, proc]
return values.W_ReadTable(parent, char, sym, proc)
@expose("read/recursive", only_old=True)
def read_recursive(args):
return values.w_false
def make_stub_predicates(names):
for name in names:
message = "%s: not yet implemented" % name
@expose(name, [values.W_Object])
def predicate(obj):
if not objectmodel.we_are_translated():
print message
return values.w_false
predicate.__name__ = "stub_predicate(%s)" % name
def make_stub_predicates_no_linklet():
STUB_PREDICATES_NO_LINKLET = ["namespace-anchor?",
"rename-transformer?",
"readtable?",
"liberal-define-context?",
"compiled-expression?",
"special-comment?",
"internal-definition-context?",
"namespace?",
"compiled-module-expression?"]
make_stub_predicates(STUB_PREDICATES_NO_LINKLET)
if not w_global_config.is_expander_loaded():
make_stub_predicates_no_linklet()
@expose("unsafe-start-atomic", [])
def unsafe_start_atomic():
return values.w_void
@expose("unsafe-start-breakable-atomic", [])
def unsafe_start_breakable_atomic():
return values.w_void
@expose("unsafe-end-breakable-atomic", [])
def unsafe_end_breakable_atomic():
return values.w_void
@expose("unsafe-end-atomic", [])
def unsafe_end_atomic():
return values.w_void
@expose("__dummy-function__", [])
def __dummy__():
from rpython.rlib.rbigint import ONERBIGINT
from rpython.rlib.runicode import str_decode_utf_8
ex = ONERBIGINT.touint()
print ex
@expose("primitive-table", [values.W_Object])
def primitive_table(v):
if v not in select_prim_table:
return values.w_false
if v in prim_table_cache:
return prim_table_cache[v]
expose_env = {}
for prim_name_sym in select_prim_table[v]:
if prim_name_sym in prim_env:
expose_env[prim_name_sym] = prim_env[prim_name_sym]
table = make_simple_immutable_table(W_EqImmutableHashTable,
expose_env.keys(),
expose_env.values())
prim_table_cache[v] = table
return table
@expose("unquoted-printing-string", [values_string.W_String])
def up_string(s):
return values.W_UnquotedPrintingString(s)
@expose("unquoted-printing-string-value", [values.W_UnquotedPrintingString])
def ups_val(v):
return v.string
# Any primitive on Pycket can use "w_global_config.is_debug_active()"
# to control debug outputs (or breakpoints in the interpreter) (with
# an even greater output control with the console_log with verbosity
# levels)
@expose("pycket:activate-debug", [])
def activate_debug():
w_global_config.activate_debug()
@expose("pycket:deactivate-debug", [])
def deactivate_debug():
w_global_config.deactivate_debug()
@expose("pycket:is-debug-active", [])
def debug_status():
return values.W_Bool.make(w_global_config.is_debug_active())
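# A minimal sketch of the pattern described above (illustrative only, not an
# exposed primitive): branch on the global debug flag and route output through
# console_log, imported the same way pycket_print does below.
def _debug_log_example(message):
    from pycket.util import console_log
    if w_global_config.is_debug_active():
        console_log(message, debug=True)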
# Maybe we should do it with just one Racket level parameter
@expose("pycket:get-verbosity", [])
def get_verbosity():
lvl = w_global_config.get_config_val('verbose')
return values.W_Fixnum(lvl)
@expose("pycket:set-verbosity", [values.W_Fixnum])
def set_verbosity(v):
w_global_config.set_config_val('verbose', v.value)
@expose("pycket:activate-keyword", [values.W_Symbol])
def activate_debug_keyword(v):
w_global_config.activate_keyword(v.variable_name())
@expose("pycket:deactivate-keyword", [values.W_Symbol])
def deactivate_debug_keyword(v):
w_global_config.deactivate_keyword(v.variable_name())
@expose("pycket:report-undefined-prims", [])
def report_undefined_prims():
from pycket.prims.primitive_tables import report_undefined_prims
report_undefined_prims()
addr_sym = values.W_Symbol.make("mem-address")
@expose("pycket:print", [values.W_Object, default(values.W_Symbol, addr_sym)])
def pycket_print(o, sym):
from pycket.util import console_log
if sym is addr_sym:
console_log("PYCKET:PRINT : %s" % o, debug=True)
else:
console_log("PYCKET:PRINT : %s" % o.tostring(), debug=True)
@expose("pycket:eq?", [values.W_Object, values.W_Object])
def pycket_eq(o1, o2):
return values.W_Bool.make(o1 is o2)
expose_val("error-print-width", values_parameter.W_Parameter(values.W_Fixnum.make(256)))
@expose("banner", [])
def banner():
from pycket.env import w_version
version = w_version.get_version()
return values_string.W_String.make("Welcome to Pycket %s.\n"%version)
executable_yield_handler = values_parameter.W_Parameter(do_void.w_prim)
expose_val("executable-yield-handler", executable_yield_handler)
current_load_extension = values_parameter.W_Parameter(do_void.w_prim)
expose_val("current-load-extension", current_load_extension)
@expose("system-language+country", [])
def lang_country():
return values_string.W_String.make("en_US.UTF-8")
@expose("unsafe-add-post-custodian-shutdown", [values.W_Object])
def add_post(p):
return values.w_void
@expose("make-will-executor", [])
def make_will_exec():
return values.W_WillExecutor()
@expose("will-register", [values.W_WillExecutor, values.W_Object, values.W_Object])
def will_register(w, v, p):
return values.w_void
@expose("will-execute", [values.W_WillExecutor])
def will_exec(w):
return values.w_void
@expose("will-try-execute", [values.W_WillExecutor, default(values.W_Object, values.w_false)])
def will_try_exec(w, v):
return v
@expose("thread", [values.W_Object])
def thread(p):
return values.W_Thread()
@expose("thread/suspend-to-kill", [values.W_Object])
def thread_susp(p):
return values.W_Thread()
@expose("make-channel", [])
def make_channel():
return values.W_Channel()
@expose("primitive-lookup", [values.W_Symbol], simple=True)
def primitive_lookup(sym):
return prim_env.get(sym, values.w_false)<|fim▁end|> | return return_value(default_read_handler, env, cont)
else: |
<|file_name|>command-test.js<|end_file_name|><|fim▁begin|>var Table = require ('../table.js');
var Command = require('../command.js');
require('../actions/report-action.js');
require('../actions/move-action.js');
require('../actions/right-action.js');
require('../actions/left-action.js');
require('../actions/place-action.js');
exports.Valid_PLACE = function(test){
var cmd = Command.GetCommand(['place', '1', '2', 'north']);
test.notEqual(cmd, null);
test.equal(cmd.data.x, 1);
test.equal(cmd.data.y, 2);
test.equal(cmd.data.f.getName(), 'NORTH');
test.done();
}
exports.Invalid_PLACE = function(test){
var cmd = Command.GetCommand(['place', '1', '2', 'northf']);
test.equal(cmd, null);
test.done();
}
exports.Valid_MOVE = function(test){
var cmd = Command.GetCommand(['MOVE']);
test.notEqual(cmd, null);
test.done();
}
exports.Valid_LEFT = function(test){
var cmd = Command.GetCommand(['left']);
test.notEqual(cmd, null);
test.done();
}
exports.Valid_RIGHT = function(test){
var cmd = Command.GetCommand(['Right']);
test.notEqual(cmd, null);
test.done();
}
exports.Invalid_Command = function(test){
var cmd = Command.GetCommand(['Oops']);
test.equal(cmd, null);
test.done();
}
exports.Valid_Execution = function(test){
// Create dummy Context
var ctx = {
table: new Table(5,5),
robot:null,
Feedback:{Show:function(msg){}},
Logger:{Log:function(msg){}}
}
Command.GetCommand(['place', '1', '1', 'east']).Execute(ctx);
Command.GetCommand(['move']).Execute(ctx);
Command.GetCommand(['left']).Execute(ctx);
Command.GetCommand(['move']).Execute(ctx);
Command.GetCommand(['move']).Execute(ctx);
test.equal(ctx.robot.x, 2);<|fim▁hole|> test.done();
}
exports.Valid_IgnoreFallingMove = function(test){
// Create dummy Context
var ctx = {
table: new Table(5,5),
robot:null,
Feedback:{Show:function(msg){}},
Logger:{Log:function(msg){}}
}
Command.GetCommand(['place', '4', '1', 'east']).Execute(ctx);
Command.GetCommand(['move']).Execute(ctx);
test.equal(ctx.robot.x, 4);
test.equal(ctx.robot.y, 1);
test.equal(ctx.robot.f.getName(), 'EAST');
test.done();
}<|fim▁end|> | test.equal(ctx.robot.y, 3);
test.equal(ctx.robot.f.getName(), 'NORTH'); |
<|file_name|>update_placements.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>"""This code example updates a single placement to allow for AdSense targeting.
To determine which placements exist, run get_all_placements.py.
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
PLACEMENT_ID = 'INSERT_PLACEMENT_ID_HERE'
def main(client, placement_id):
# Initialize appropriate service.
placement_service = client.GetService('PlacementService', version='v201411')
# Create query.
values = [{
'key': 'placementId',
'value': {
'xsi_type': 'NumberValue',
'value': placement_id
}
}]
query = 'WHERE id = :placementId'
statement = dfp.FilterStatement(query, values, 1)
# Get placements by statement.
placements = placement_service.getPlacementsByStatement(
statement.ToStatement())
for placement in placements:
if not placement['targetingDescription']:
placement['targetingDescription'] = 'Generic description'
placement['targetingAdLocation'] = 'All images on sports pages.'
placement['targetingSiteName'] = 'http://code.google.com'
placement['isAdSenseTargetingEnabled'] = 'true'
# Update placements remotely.
placements = placement_service.updatePlacements(placements)
for placement in placements:
print ('Placement with id \'%s\', name \'%s\', and AdSense targeting '
'enabled \'%s\' was updated.'
% (placement['id'], placement['name'],
placement['isAdSenseTargetingEnabled']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, PLACEMENT_ID)<|fim▁end|> | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
<|file_name|>DownloadTransformer.java<|end_file_name|><|fim▁begin|>package org.apache.cocoon.transformation;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipEntry;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* This transformer downloads a new file to disk.
* <p>
* It triggers for elements in the namespace "http://apache.org/cocoon/download/1.0".
* Attributes:
* @src : the file that should be downloaded
* @target (optional): path where the file should be stored (includes filename)
* @target-dir (optional): directory where the file should be stored
* @unzip (optional): if "true" then unzip file after downloading.
* If there is no @target or @target-dir attribute a temporary file is created.
* <p>
* Example XML input:
* <pre>
* {@code
* <download:download src="http://some.server.com/zipfile.zip"
* target="/tmp/zipfile.zip" unzip="true"/>
* }
* </pre>
* The @src specifies the file that should be downloaded. The
* @target specifies where the file should be stored. @unzip is true, so the
* file will be unzipped immediately.
* <p>
* The result is
* <pre>
* {@code
* <download:result unzipped="/path/to/unzipped/file/on/disk">/path/to/file/on/disk</download:result>
* }
* </pre>
* (@unzipped is only present when @unzip="true") or
* <pre>
* {@code
 * <download:error>The error message</download:error>
* }
* </pre>
* if an error (other than a HTTP error) occurs.
* HTTP errors are thrown.
* Define this transformer in the sitemap:
* <pre>
* {@code
* <map:components>
* <map:transformers>
* <map:transformer name="download" logger="sitemap.transformer.download"
* src="org.apache.cocoon.transformation.DownloadTransformer"/>
* ...
* }
* </pre>
* Use this transformer:
* <pre>
* {@code
* <map:transform type="download"/>
* }
* </pre>
*
*
* @author <a href="mailto:[email protected]">Maarten Kroon</a>
* @author <a href="mailto:[email protected]">Huib Verweij</a>
*/
public class DownloadTransformer extends AbstractSAXTransformer {
public static final String DOWNLOAD_NS = "http://apache.org/cocoon/download/1.0";
public static final String DOWNLOAD_ELEMENT = "download";
private static final String DOWNLOAD_PREFIX = "download";
public static final String RESULT_ELEMENT = "result";
public static final String ERROR_ELEMENT = "error";
public static final String SRC_ATTRIBUTE = "src";
public static final String TARGET_ATTRIBUTE = "target";
public static final String TARGETDIR_ATTRIBUTE = "target-dir";
public static final String UNZIP_ATTRIBUTE = "unzip";
public static final String RECURSIVE_UNZIP_ATTRIBUTE = "recursive-unzip";
public static final String UNZIPPED_ATTRIBUTE = "unzipped";
public DownloadTransformer() {
this.defaultNamespaceURI = DOWNLOAD_NS;
}
@Override
public void setup(SourceResolver resolver, Map objectModel, String src,
Parameters params) throws ProcessingException, SAXException, IOException {
super.setup(resolver, objectModel, src, params);
}
@Override
public void startTransformingElement(String uri, String localName,
String qName, Attributes attributes) throws SAXException, ProcessingException, IOException {
if (DOWNLOAD_NS.equals(uri) && DOWNLOAD_ELEMENT.equals(localName)) {
try {
File[] downloadResult = download(
attributes.getValue(SRC_ATTRIBUTE),
attributes.getValue(TARGETDIR_ATTRIBUTE),
attributes.getValue(TARGET_ATTRIBUTE),
attributes.getValue(UNZIP_ATTRIBUTE),
attributes.getValue(RECURSIVE_UNZIP_ATTRIBUTE)
);
File downloadedFile = downloadResult[0];
File unzipDir = downloadResult[1];
String absPath = downloadedFile.getCanonicalPath();
AttributesImpl attrsImpl = new AttributesImpl();
if (unzipDir != null) {
attrsImpl.addAttribute("", UNZIPPED_ATTRIBUTE, UNZIPPED_ATTRIBUTE, "CDATA", unzipDir.getAbsolutePath());
}
xmlConsumer.startElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT), attrsImpl);
xmlConsumer.characters(absPath.toCharArray(), 0, absPath.length());
xmlConsumer.endElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT));
} catch (Exception e) {
// throw new SAXException("Error downloading file", e);
xmlConsumer.startElement(uri, ERROR_ELEMENT, qName, attributes);
String message = e.getMessage();
xmlConsumer.characters(message.toCharArray(), 0, message.length());
xmlConsumer.endElement(uri, ERROR_ELEMENT, qName);
}
} else {
super.startTransformingElement(uri, localName, qName, attributes);
}
}
@Override
public void endTransformingElement(String uri, String localName, String qName)
throws SAXException, ProcessingException, IOException {
if (DOWNLOAD_NS.equals(namespaceURI) && DOWNLOAD_ELEMENT.equals(localName)) {
return;
}
super.endTransformingElement(uri, localName, qName);
}
private File[] download(String sourceUri, String targetDir, String target, String unzip, String recursiveUnzip)
throws ProcessingException, IOException, SAXException {
File targetFile;
File unZipped = null;
if (null != target && !target.equals("")) {
targetFile = new File(target);
} else if (null != targetDir && !targetDir.equals("")) {
            targetFile = new File(targetDir, FilenameUtils.getName(sourceUri)); // keep the source file name inside target-dir
} else {
String baseName = FilenameUtils.getBaseName(sourceUri);
String extension = FilenameUtils.getExtension(sourceUri);
targetFile = File.createTempFile(baseName, "." + extension);
}
if (!targetFile.getParentFile().exists()) {
targetFile.getParentFile().mkdirs();
}
boolean unzipFile = (null != unzip && unzip.equals("true")) ||
(null != recursiveUnzip && recursiveUnzip.equals("true"));
String absPath = targetFile.getAbsolutePath();
String unzipDir = unzipFile ? FilenameUtils.removeExtension(absPath) : "";
HttpClient httpClient = new HttpClient();
httpClient.setConnectionTimeout(60000);
httpClient.setTimeout(60000);
if (System.getProperty("http.proxyHost") != null) {
// getLogger().warn("PROXY: "+System.getProperty("http.proxyHost"));
String nonProxyHostsRE = System.getProperty("http.nonProxyHosts", "");
if (nonProxyHostsRE.length() > 0) {<|fim▁hole|> for (String pHost : pHosts) {
nonProxyHostsRE += "|(^https?://" + pHost + ".*$)";
}
nonProxyHostsRE = nonProxyHostsRE.substring(1);
}
if (nonProxyHostsRE.length() == 0 || !sourceUri.matches(nonProxyHostsRE)) {
try {
HostConfiguration hostConfiguration = httpClient.getHostConfiguration();
hostConfiguration.setProxy(System.getProperty("http.proxyHost"), Integer.parseInt(System.getProperty("http.proxyPort", "80")));
httpClient.setHostConfiguration(hostConfiguration);
} catch (Exception e) {
throw new ProcessingException("Cannot set proxy!", e);
}
}
}
HttpMethod httpMethod = new GetMethod(sourceUri);
try {
int responseCode = httpClient.executeMethod(httpMethod);
if (responseCode < 200 || responseCode >= 300) {
throw new ProcessingException(String.format("Received HTTP status code %d (%s)", responseCode, httpMethod.getStatusText()));
}
OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile));
try {
IOUtils.copyLarge(httpMethod.getResponseBodyAsStream(), os);
} finally {
os.close();
}
} finally {
httpMethod.releaseConnection();
}
if (!"".equals(unzipDir)) {
unZipped = unZipIt(targetFile, unzipDir, recursiveUnzip);
}
return new File[] {targetFile, unZipped};
}
/**
* Unzip it
* @param zipFile input zip file
* @param outputFolder zip file output folder
*/
private File unZipIt(File zipFile, String outputFolder, String recursiveUnzip){
byte[] buffer = new byte[4096];
File folder = null;
try{
            // create the output directory if it does not exist
folder = new File(outputFolder);
if (!folder.exists()){
folder.mkdir();
}
try (
//get the zip file content
ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
//get the zipped file list entry
ZipEntry ze = zis.getNextEntry();
while(ze != null){
String fileName = ze.getName();
File newFile = new File(outputFolder + File.separator + fileName);
// System.out.println("file unzip : "+ newFile.getAbsoluteFile());
// create all non existing folders
// else you will hit FileNotFoundException for compressed folder
new File(newFile.getParent()).mkdirs();
try (FileOutputStream fos = new FileOutputStream(newFile)) {
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
}
if ((null != recursiveUnzip && "true".equals(recursiveUnzip)) && FilenameUtils.getExtension(fileName).equals("zip")) {
unZipIt(newFile, FilenameUtils.concat(outputFolder, FilenameUtils.getBaseName(fileName)), recursiveUnzip);
}
ze = zis.getNextEntry();
}
zis.closeEntry();
}
// System.out.println("Done unzipping.");
} catch(IOException ex){
ex.printStackTrace();
}
return folder;
}
}<|fim▁end|> | String[] pHosts = nonProxyHostsRE.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*").split("\\|");
nonProxyHostsRE = ""; |
<|file_name|>strconv.go<|end_file_name|><|fim▁begin|>// This file is part of MuMax, a high-performance micromagnetic simulator.
// Copyright 2011 Arne Vansteenkiste and Ben Van de Wiele.
// Use of this source code is governed by the GNU General Public License version 3
// (as published by the Free Software Foundation) that can be found in the license.txt file.
// Note that you are welcome to modify this code under the condition that you do not remove any
// copyright notices and prominently state that you modified it, giving a relevant date.
package common
// This file implements safe wrappers for strconv that panic on illegal input.
// Author: Arne Vansteenkiste
import (
"strconv"
)
// Safe strconv.Atof32
func Atof32(s string) float64 {
f, err := strconv.ParseFloat(s, 32)
if err != nil {
panic(InputErr(err.Error()))
}
return float64(f)
}
// Safe strconv.Atoi
func Atoi(s string) int {
i, err := strconv.Atoi(s)
if err != nil {
panic(InputErr(err.Error()))
}
return i
}
// Atob safely parses a bool; panics with InputErr on invalid input.
func Atob(str string) bool {
b, err := strconv.ParseBool(str)
if err != nil {
panic(InputErr(err.Error()))
}
return b
}
// Atoi64 safely parses an int64; panics with InputErr on invalid input.
func Atoi64(str string) int64 {
i, err := strconv.ParseInt(str, 10, 64)
if err != nil {
panic(InputErr(err.Error()))
}
return i
}
// Atof64 safely parses a 64-bit float; panics with InputErr on invalid input.
func Atof64(str string) float64 {
i, err := strconv.ParseFloat(str, 64)
if err != nil {
panic(InputErr(err.Error()))
}<|fim▁hole|> return i
}<|fim▁end|> | |
<|file_name|>XMLControl.py<|end_file_name|><|fim▁begin|>from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import xml.etree.ElementTree
from xml.etree.cElementTree import ElementTree, Element, SubElement
from xml.etree.cElementTree import fromstring, tostring
import fs_uae_launcher.fsui as fsui
from ..Config import Config
from ..Settings import Settings
from ..I18N import _, ngettext
class XMLControl(fsui.TextArea):
def __init__(self, parent):
fsui.TextArea.__init__(self, parent, horizontal_scroll=True)
self.path = ""
def connect_game(self, info):
tree = self.get_tree()
root = tree.getroot()
if not root.tag == "config":
return
game_node = self.find_or_create_node(root, "game")
game_node.set("uuid", info["uuid"])
game_name_node = self.find_or_create_node(game_node, "name")
game_name_node.text = info["name"]
self.set_tree(tree)
def find_or_create_node(self, element, name):
node = element.find(name)
if node is None:
node = SubElement(element, name)
return node
def set_path(self, path):
if not os.path.exists(path):
path = ""
self.path = path
if path:
self.load_xml(path)
else:
self.set_text("")
def get_tree(self):
text = self.get_text().strip()
try:
root = fromstring(text.encode("UTF-8"))
except Exception:
# FIXME: show message
import traceback
traceback.print_exc()
return
tree = ElementTree(root)
indent_tree(root)
return tree
def set_tree(self, tree):
data = tostring(tree.getroot(), encoding="UTF-8").decode("UTF-8")
std_decl = "<?xml version='1.0' encoding='UTF-8'?>"
if data.startswith(std_decl):
data = data[len(std_decl):].strip()
self.set_text(data)
def load_xml(self, path):
with open(path, "rb") as f:
data = f.read()
self.set_text(data)
def save(self):
if not self.path:
print("no path to save XML to")
return
self.save_xml(self.path)
def save_xml(self, path):
        tree = self.get_tree()
        if tree is None:
            # the text area holds invalid XML; get_tree already reported it
            return
        tree.write(path)
def indent_tree(elem, level=0):
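    # Standard ElementTree pretty-printing recipe: rewrites .text/.tail
    # whitespace in place. The loop below deliberately rebinds the name
    # `elem` to each child, so the check after the loop adjusts the tail
    # of the last child.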
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent_tree(elem, level+1)
if not elem.tail or not elem.tail.strip():<|fim▁hole|><|fim▁end|> | elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i |
<|file_name|>82.py<|end_file_name|><|fim▁begin|>idade = 12
if idade < 4:
preco = 0
elif idade < 18:
preco = 5
elif idade < 65:
preco = 10
else:
preco = 5
print('Your admission fee is R$' + str(preco) + '.')
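# With idade = 12 only the idade < 18 branch matches, so preco = 5 and the
# script prints: Your admission fee is R$5.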
'''
One more elif block was added<|fim▁hole|>is above 65, the final else block runs.
'''<|fim▁end|> | for ages below 65. If the age is greater
than 18 and less than 65 that block runs; if the age |
<|file_name|>rust.rs<|end_file_name|><|fim▁begin|>extern crate proc_macro;
extern crate phrases as sayings;
extern crate syn;
#[macro_use]
extern crate quote;<|fim▁hole|>
use sayings::japanese::greetings as ja_greetings;
use sayings::japanese::farewells::*;
use sayings::english::{self, greetings as en_greetings, farewells as en_farewells};
use std::os::raw::c_int;
extern "C" {
fn c_callback(n: c_int);
}
fn main() {
println!("Hello in English; {}", en_greetings::hello());
println!("And in Japanese: {}", ja_greetings::hello());
println!("Goodbye in English: {}", english::farewells::goodbye());
println!("Again: {}", en_farewells::goodbye());
println!("And in Japanese: {}", goodbye());
}<|fim▁end|> | |
<|file_name|>WorldDaoTest.java<|end_file_name|><|fim▁begin|>package com.github.ssindelar.ps2parser;
import static org.fest.assertions.api.Assertions.assertThat;
import org.testng.annotations.Test;
import com.github.ssindelar.ps2parser.data.world.WorldResponse;
import com.github.ssindelar.ps2parser.data.world.status.ApiWorldStatus;
import com.github.ssindelar.ps2parser.data.world.status.WorldStatusResponds;
public class WorldDaoTest {
private final WorldApiDAO dao = TestData.API_CONNECTION.getWorldDao();
@Test
public void getAllWorlds() {
WorldResponse worldResponse = this.dao.getAllWorlds();
assertThat(worldResponse).isNotNull();
assertThat(worldResponse.getWorlds().size()).isEqualTo(worldResponse.getReturned());
}<|fim▁hole|> public void getStatusAllWorlds() {
WorldStatusResponds statusResponse = this.dao.getStatusAllWorlds();
assertThat(statusResponse).isNotNull();
assertThat(statusResponse.getAdditionalProperties()).isEmpty();
assertThat(statusResponse.getWorlds().size()).isEqualTo(statusResponse.getReturned());
for (ApiWorldStatus worldStatus : statusResponse.getWorlds()) {
assertThat(worldStatus.getAdditionalProperties()).isEmpty();
assertThat(worldStatus.getLastReported()).isNotNull();
assertThat(worldStatus.getGame()).isNotNull().isEqualTo("ps2");
}
}
}<|fim▁end|> |
@Test |
<|file_name|>data_notes.py<|end_file_name|><|fim▁begin|># Copyright 2015 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: data_notes
:platform: Unix
:synopsis: A module containing extended doc strings for the data module.
.. moduleauthor:: Nicola Wadeson <[email protected]>
"""
def _set_preview_note():
"""
Each ``preview_list`` element should be of the form
``start:stop:step:chunk``, where ``stop``, ``step`` and ``chunk`` are
optional (defaults: ``stop`` = ``start`` + 1, ``step`` = 1, ``chunk`` = 1)
but must be given in that order.
.. note::
**start:stop[:step]**
represents the set of indices specified by:
>>> indices = range(start, stop[, step])
For more information see :func:`range`
**start:stop:step:chunk (chunk > 1)**
represents the set of indices specified by:
>>> a = np.tile(np.arange(start, stop, step), (chunk, 1))
>>> b = np.transpose(np.tile(np.arange(chunk)-chunk/2, \
(a.shape[1], 1)))
>>> indices = np.ravel(np.transpose(a + b))
Chunk indicates how many values to take around each value in
``range(start, stop, step)``. It is only available for slicing
dimensions.
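        As a worked example of the chunk formulas above, the entry
        ``2:8:2:2`` expands to the indices [1, 2, 3, 4, 5, 6]: the base
        range is [2, 4, 6] and, with ``chunk`` = 2, each value also pulls
        in the index immediately before it.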
.. warning:: If any indices are out of range (or negative)
then the list is invalid. When chunk > 1, new start and
end values will be:
>>> new_start = start - int(chunk/2)
>>> new_end = range(start, stop, step)[-1] + \
(step - int(chunk/2))
**accepted values**:
Each entry is executed using :func:`eval` so simple formulas are\
allowed and may contain the following keywords:
* ``:`` is a simplification for 0:end:1:1 (all values)
* ``mid`` is int(shape[dim]/2)-1<|fim▁hole|> """
def image_key():
"""
This is a helper function to be used after :meth:`savu.data.\
data_structures.data_create.DataCreate.create_dataset`,
>>> out_dataset[0].create_dataset(in_dataset[0])
>>> out_dataset[0].trim_output_data(in_dataset[0], image_key=0)
if in_dataset[0] is a plugin input dataset with an image_key and 0 is
the data index.
"""
def _create():
"""
.. note:: **Creating a dataset**
Each new dataset requires the following information:
* ``shape``
* ``axis_labels``
* ``patterns``
This function can be used to setup the required information in one
of two ways:
1. Passing a ``Data`` object as the only argument: All required
information is coped from this data object. For example,
>>> out_dataset[0].create_dataset(in_dataset[0])
2. Passing kwargs: ``shape`` and ``axis_labels`` are required
(see above for other optional arguments). For example,
>>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=new_shape)
    .. warning:: If the ``patterns`` keyword is not used, patterns must be added
after :meth:`~savu.data.data_structures.data_create.DataCreate.\
create_dataset` by calling :func:`~savu.data.data_structures.data.Data.\
add_pattern`.
"""
def _shape():
"""
.. note::
``shape`` keyword argument
Options to pass are:
1. Data object: Copy shape from the Data object.
>>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=in_dataset[0])
2. tuple: Define shape explicity.
>>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=(10, 20, 30))
"""
def axis_labels():
"""
.. note::
``axis_labels`` keyword argument
Options to pass are:
1. Data object: Copy all labels from the Data object.
>>> out_dataset[0].create_dataset(axis_labels=in_dataset[0], \
shape=new_shape)
2. {Data_obj: list}: Copy labels from the Data object and then
remove or insert.
* To remove dimensions: list_entry = 'dim'. For example, to
remove the first and last axis_labels from the copied list:
            >>> out_dataset[0].create_dataset(axis_labels=\
{in_dataset[0]: ['1', '-1']}, shape=new_shape)
* To add/replace dimensions: list_entry = 'dim.name.unit'.
>>> out_dataset[0].create_dataset(axis_labels={in_dataset[0]: \
['2.det_x.pixel', '3.det_y.pixel']}, shape=new_shape)
* To insert dimensions: list_entry = '~dim.name.unit'.
>>> out_dataset[0].create_dataset(axis_labels={in_dataset[0]: \
['~2.det_x.pixel', '~3.det_y.pixel']}, shape=new_shape)
(or a combination, where each successive step is applied after
the previous changes have been made.)
3. list: Where each element is of the form 'name.unit'.
>>> out_dataset[0].create_dataset(axis_labels=['rotation.deg',\
'det_x.pixel', 'det_y.pixel'], shape=new_shape)
"""
def patterns():
"""
.. note::
``patterns`` keyword argument
Options to pass are:
1. Data object: Copy all patterns from the Data object.
>>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=new_shape, patterns=in_dataset[0])
2. {Data_obj: list}: Copy only the patterns given in the list
from the Data object.
* Copy the patterns: list_entry = 'name'
            >>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=new_shape, patterns={in_dataset[0]: ['SINOGRAM', 'PROJECTION']})
* Copy patterns but remove dimensions: list_entry = \
'name1.r1,r2...':
            >>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=new_shape, patterns={in_dataset[0]: ['SINOGRAM.1', 'PROJECTION.1']})
* Copy ALL patterns but remove dimensions: list_entry = \
'\*.r1,r2...':
            >>> out_dataset[0].create_dataset(axis_labels=labels, \
shape=new_shape, patterns={in_dataset[0]: '*.0'})
"""
def _padding():
"""
Either 'dim.pad', 'dim.after.pad' or 'dim.before.pad', where 'dim' and\
    'pad' are integers and give the dimension to pad and the pad \
amount respectively. The keywords 'before' and 'after' specify padding\
'before' or 'after' the original dimension index (if neither are\
specified the both directions will be padded. """<|fim▁end|> | * ``end`` is shape[dim] |
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>var events = require('events');
var request = require('request');
var zlib = require('zlib');
var iconv = require('iconv-lite');
var async = require('async');
var imagesize = require('imagesize');
var moment = require('moment');
var _ = require('underscore');
var cache = require('./cache');
var htmlUtils = require('./html-utils');
if (!GLOBAL.CONFIG) {
GLOBAL.CONFIG = require('../config');
}
/**
* @private
* Do HTTP GET request and handle redirects
* @param url Request uri (parsed object or string)
* @param {Object} options
* @param {Number} [options.maxRedirects]
* @param {Boolean} [options.fullResponse] True if need load full page response. Default: false.
 * @returns {events.EventEmitter} The emitter object, which emits request, response and error events
*/
var getUrl = exports.getUrl = function(url, options) {
var req = new events.EventEmitter();
var options = options || {};
// Store cookies between redirects and requests.
var jar = options.jar;
if (!jar) {
jar = request.jar();
}
process.nextTick(function() {
try {
var supportGzip = !process.version.match(/^v0\.8/);
var r = request({
uri: url,
method: 'GET',
headers: {
'User-Agent': CONFIG.USER_AGENT,
'Connection': 'close',
'Accept-Encoding': supportGzip ? 'gzip' : ''
},
maxRedirects: options.maxRedirects || 3,
timeout: options.timeout || CONFIG.RESPONSE_TIMEOUT,
followRedirect: options.followRedirect,
jar: jar
})
.on('error', function(error) {
req.emit('error', error);
})
.on('response', function(res) {
if (supportGzip && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) > -1) {
var gunzip = zlib.createUnzip();
gunzip.request = res.request;
gunzip.statusCode = res.statusCode;
gunzip.headers = res.headers;
if (!options.asBuffer) {
gunzip.setEncoding("binary");
}
req.emit('response', gunzip);
res.pipe(gunzip);
} else {
if (!options.asBuffer) {
res.setEncoding("binary");
}
req.emit('response', res);
}
});
req.emit('request', r);
} catch (ex) {
console.error('Error on getUrl for', url, '.\n Error:' + ex);
req.emit('error', ex);
}
});
return req;
};
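// A typical (hypothetical) call site; the returned emitter is the whole API:
//   getUrl('http://example.com/', {timeout: 5000})
//       .on('response', function(res) { console.log(res.statusCode); })
//       .on('error', function(error) { console.error(error); });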
var getHead = function(url, options) {
var req = new events.EventEmitter();
var options = options || {};
// Store cookies between redirects and requests.
var jar = options.jar;
if (!jar) {
jar = request.jar();
}
process.nextTick(function() {
try {
var r = request({
uri: url,
method: 'HEAD',
headers: {
'User-Agent': CONFIG.USER_AGENT,
'Connection': 'close'
},
maxRedirects: options.maxRedirects || 3,
timeout: options.timeout || CONFIG.RESPONSE_TIMEOUT,
followRedirect: options.followRedirect,
jar: jar
})
.on('error', function(error) {
req.emit('error', error);
})
.on('response', function(res) {
req.emit('response', res);
});
req.emit('request', r);
} catch (ex) {
console.error('Error on getHead for', url, '.\n Error:' + ex);
req.emit('error', ex);
}
});
return req;
};
exports.getCharset = function(string, doNotParse) {
var charset;
if (doNotParse) {
charset = string.toUpperCase();
} else if (string) {
var m = string && string.match(/charset\s*=\s*([\w_-]+)/i);
charset = m && m[1].toUpperCase();
}
return charset;
};
exports.encodeText = function(charset, text) {
try {
var b = iconv.encode(text, "ISO8859-1");
return iconv.decode(b, charset || "UTF-8");
} catch(e) {
return text;
}
};
/**
* @public
* Get image size and type.
* @param {String} uri Image uri.
* @param {Object} [options] Options.
* @param {Boolean} [options.cache] False to disable cache. Default: true.
* @param {Function} callback Completion callback function. The callback gets two arguments (error, result) where result has:
* - result.format
* - result.width
* - result.height
*
* error == 404 if not found.
* */
exports.getImageMetadata = function(uri, options, callback){
if (typeof options === 'function') {
callback = options;
options = {};
}
options = options || {};
cache.withCache("image-meta:" + uri, function(callback) {
var loadImageHead, imageResponseStarted, totalTime, timeout, contentLength;
var requestInstance = null;
function finish(error, data) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
} else {
return;
}
// We don't need more data. Abort causes error. timeout === null here so error will be skipped.
requestInstance && requestInstance.abort();
if (!error && !data) {
error = 404;
}
data = data || {};
if (options.debug) {
data._time = {
imageResponseStarted: imageResponseStarted || totalTime(),
loadImageHead: loadImageHead && loadImageHead() || 0,
total: totalTime()
};
}
if (error && error.message) {
error = error.message;
}
if ((typeof error === 'string' && error.indexOf('ENOTFOUND') > -1) ||
error === 500) {
error = 404;
}
if (error) {
data.error = error;
}
callback(null, data);
}
timeout = setTimeout(function() {
finish("timeout");
}, options.timeout || CONFIG.RESPONSE_TIMEOUT);
if (options.debug) {
totalTime = createTimer();
}
async.waterfall([
function(cb){
getUrl(uri, {
timeout: options.timeout || CONFIG.RESPONSE_TIMEOUT,
maxRedirects: 5,
asBuffer: true
})
.on('request', function(req) {
requestInstance = req;
})
.on('response', function(res) {
var content_type = res.headers['content-type'];
                    if (content_type && content_type !== 'application/octet-stream' && content_type !== 'binary/octet-stream') {
                        if (content_type.indexOf('image/') === -1) {
                            cb('invalid content type: ' + res.headers['content-type']);
                            return;
                        }
                    } else {
                        if (!uri.match(/\.(jpg|png|gif)(\?.*)?$/i)) {
                            cb('invalid content type: no content-type header and file extension');
                            return;
                        }
                    }
if (res.statusCode == 200) {
if (options.debug) {
imageResponseStarted = totalTime();
}
contentLength = parseInt(res.headers['content-length'] || '0', 10);
imagesize(res, cb);
} else {
cb(res.statusCode);
}
})
.on('error', function(error) {
cb(error);
});
},
function(data, cb){
if (options.debug) {
loadImageHead = createTimer();
}
if (contentLength) {
data.content_length = contentLength;
}
cb(null, data);
}
], finish);
}, {disableCache: options.disableCache}, callback);
};
exports.getUriStatus = function(uri, options, callback) {
if (typeof options === 'function') {
callback = options;
options = {};
}
options = options || {};
cache.withCache("status:" + uri, function(cb) {
var time, timeout;
function finish(error, data) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
} else {
return;
}
data = data || {};
if (error) {
data.error = error;
}
if (options.debug) {
data._time = time();
}
cb(null, data);
}
timeout = setTimeout(function() {
finish("timeout");
}, options.timeout || CONFIG.RESPONSE_TIMEOUT);
if (options.debug) {
time = createTimer();
}
getUriStatus(uri, options, finish);
}, {disableCache: options.disableCache}, callback);
};
exports.getContentType = function(uriForCache, uriOriginal, options, cb) {
cache.withCache("content-type:" + uriForCache, function(cb) {
var timeout, requestInstance, totalTime;
function finish(error, content_type) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
} else {
return;
}
// We don't need more data. Abort causes error. timeout === null here so error will be skipped.
requestInstance && requestInstance.abort();
var data = {};
if (options.debug) {
data._time = totalTime();
}
if (error) {
data.error = error;
}
if (!error && !data) {
data.error = 404;
}
data.type = content_type;
cb(null, data);
}
timeout = setTimeout(function() {
finish("timeout");
}, options.timeout || CONFIG.RESPONSE_TIMEOUT);
if (options.debug) {
totalTime = createTimer();
}
getHead(uriOriginal, {
timeout: options.timeout || CONFIG.RESPONSE_TIMEOUT,
maxRedirects: 5
})
.on('request', function(req) {
requestInstance = req;
})
.on('response', function(res) {
var content_type = res.headers['content-type'];
if (content_type) {
content_type = content_type.split(';')[0];
}
finish(null, content_type);
})
.on('error', function(error) {
finish(error);
});
}, {disableCache: options.disableCache}, cb);
};
var NOW = new Date().getTime();
exports.unifyDate = function(date) {
if (typeof date === "number") {
if (date === 0) {
return null;
}
        // Check if date is in seconds, not milliseconds.
if (NOW / date > 100) {
date = date * 1000;
}
var parsedDate = moment(date);
if (parsedDate.isValid()) {
return parsedDate.toJSON();
}
}
// TODO: time in format 'Mon, 29 October 2012 18:15:00' parsed as local timezone anyway.
var parsedDate = moment.utc(date);
if (parsedDate && parsedDate.isValid()) {
return parsedDate.toJSON();
}
return date;
};
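// For example, unifyDate(1400000000) sees a seconds timestamp (NOW / date is
// well above 100), scales it to milliseconds and returns the ISO string
// "2014-05-13T16:53:20.000Z".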
var lowerCaseKeys = exports.lowerCaseKeys = function(obj) {
for (var k in obj) {
var lowerCaseKey = k.toLowerCase();
if (lowerCaseKey != k) {
obj[lowerCaseKey] = obj[k];
delete obj[k];
k = lowerCaseKey;
}
if (typeof obj[k] == "object") {
lowerCaseKeys(obj[k]);
}
}
};
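// Example: lowerCaseKeys({Player: {Name: 'x'}}) mutates the object in place
// into {player: {name: 'x'}}, recursing into any nested objects.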
exports.sendLogToWhitelist = function(uri, meta, oembed, whitelistRecord) {
if (!CONFIG.WHITELIST_LOG_URL) {
return
}
if (whitelistRecord && !whitelistRecord.isDefault) {
// Skip whitelisted urls.
return;
}
var data = getWhitelistLogData(meta, oembed);
if (data) {
data.uri = uri;
request({
uri: CONFIG.WHITELIST_LOG_URL,
method: 'GET',
qs: data
})
.on('error', function(error) {
console.error('Error logging url:', uri, error);
})
.on('response', function(res) {
if (res.statusCode !== 200) {
console.error('Error logging url:', uri, res.statusCode);
}
});
}
};
exports.filterLinks = function(data, options) {
var links = data.links;
for(var i = 0; i < links.length;) {
var link = links[i];<|fim▁hole|> // SSL.
var isImage = link.type.indexOf('image') === 0;
var isHTML5Video = link.type.indexOf('video/') === 0;
if (options.filterNonSSL) {
var sslProtocol = link.href && link.href.match(/^(https:)?\/\//i);
var hasSSL = link.rel.indexOf('ssl') > -1;
if (sslProtocol || hasSSL || isImage || isHTML5Video) {
// Good: has ssl.
} else {
// Filter non ssl if required.
link.error = true;
}
}
// HTML5.
if (options.filterNonHTML5) {
var hasHTML5 = link.rel.indexOf('html5') > -1;
var isReader = link.rel.indexOf('reader') > -1;
if (hasHTML5 || isImage || isHTML5Video || isReader) {
// Good: is HTML5.
} else {
// Filter non HTML5 if required.
link.error = true;
}
}
// Max-width.
if (options.maxWidth) {
var isImage = link.type.indexOf('image') === 0;
// TODO: force make html5 video responsive?
var isHTML5Video = link.type.indexOf('video/') === 0;
var m = link.media;
if (m && !isImage && !isHTML5Video) {
if (m.width && m.width > options.maxWidth) {
link.error = true;
} else if (m['min-width'] && m['min-width'] > options.maxWidth) {
link.error = true;
}
}
}
if (link.error) {
links.splice(i, 1);
} else {
i++;
}
}
};
function iterateLinks(links, func) {
if (links instanceof Array) {
return links.forEach(func);
} else if (typeof links === 'object') {
for(var id in links) {
var items = links[id];
if (items instanceof Array) {
items.forEach(func);
}
}
}
}
exports.generateLinksHtml = function(data, options) {
// Links may be grouped.
var links = data.links;
iterateLinks(links, function(link) {
if (!link.html && !link.type.match(/^image/)) {
// Force make mp4 video to be autoplay in autoplayMode.
if (options.autoplayMode && link.type.indexOf('video/') === 0 && link.rel.indexOf('autoplay') === -1) {
link.rel.push('autoplay');
}
var html = htmlUtils.generateLinkElementHtml(link, {
iframelyData: data
});
if (html) {
link.html = html;
}
}
});
if (!data.html) {
var links_list = [];
iterateLinks(links, function(link) {
links_list.push(link);
});
var plain_data = _.extend({}, data, {links:links_list});
// Prevent override main html field.
var mainLink = htmlUtils.findMainLink(plain_data, options);
if (mainLink) {
if (mainLink.html) {
data.rel = mainLink.rel;
data.html = mainLink.html;
}
}
}
};
//====================================================================================
// Private
//====================================================================================
var getUriStatus = function(uri, options, cb) {
var r = request({
uri: uri,
method: 'GET',
headers: {
'User-Agent': CONFIG.USER_AGENT
},
maxRedirects: 5,
timeout: options.timeout || CONFIG.RESPONSE_TIMEOUT,
jar: request.jar() //Enable cookies, uses new jar
})
.on('error', cb)
.on('response', function(res) {
r.abort();
cb(null, {
code: res.statusCode,
content_type: res.headers['content-type']
});
});
};
var createTimer = exports.createTimer = function() {
var timer = new Date().getTime();
return function() {
return new Date().getTime() - timer;
};
};
var SHOPIFY_OEMBED_URLS = ['shopify.com', '/collections/', '/products/'];
function isYoutube(meta) {
var video;
if (meta.og && (video = meta.og.video)) {
if (!(video instanceof Array)) {
video = [video];
}
for(var i = 0; i < video.length; i++) {
var v = video[i];
var url = v.url || v;
if (url.indexOf && url.indexOf('youtube') > -1) {
return true;
}
if (v.secure_url && v.secure_url.indexOf && v.secure_url.indexOf('youtube') > -1) {
return true;
}
}
}
return false;
}
function getWhitelistLogData(meta, oembed) {
var r = {};
if (meta) {
var isJetpack = meta.twitter && meta.twitter.card === 'jetpack';
var isWordpress = meta.twitter && meta.twitter.generator === 'wordpress';
var isShopify = false;
if (meta.alternate) {
var alternate = meta.alternate instanceof Array ? meta.alternate : [meta.alternate];
var oembedLink;
for(var i = 0; !oembedLink && i < alternate.length; i++) {
var a = alternate[i];
if (a.type && a.href && a.type.indexOf('oembed') > -1) {
oembedLink = a;
}
}
if (oembedLink) {
for(var i = 0; !isShopify && i < SHOPIFY_OEMBED_URLS.length; i++) {
if (oembedLink.href.indexOf(SHOPIFY_OEMBED_URLS[i]) > -1) {
isShopify = true;
}
}
}
}
r.twitter_photo =
(meta.twitter && meta.twitter.card === 'photo')
&&
(meta.og && meta.og.type !== "article")
&&
!isJetpack
&&
!isWordpress
&&
(meta.twitter && meta.twitter.site !== 'tumblr')
&& (
(meta.twitter && !!meta.twitter.image)
||
(meta.og && !!meta.og.image)
);
r.twitter_player =
meta.twitter && !!meta.twitter.player;
r.twitter_stream =
meta.twitter && meta.twitter.player && !!meta.twitter.player.stream;
r.og_video =
(meta.og && !!meta.og.video)
&& !isYoutube(meta);
r.video_src =
!!meta.video_src;
r.sm4_video =
!!(meta.sm4 && meta.sm4.video && meta.sm4.video.embed)
}
if (oembed && oembed.type !== 'link') {
r['oembed_' + oembed.type] = true;
}
var hasTrue = false;
var result = {};
for(var k in r) {
if (r[k]) {
result[k] = r[k];
hasTrue = true;
}
}
// TODO: embedURL: getEl('[itemprop="embedURL"]')
return hasTrue && result;
}<|fim▁end|> | |
<|file_name|>gallery.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
// Name: src/ribbon/gallery.cpp
// Purpose: Ribbon control which displays a gallery of items to choose from
// Author: Peter Cawley
// Modified by:
// Created: 2009-07-22
// Copyright: (C) Peter Cawley
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#if wxUSE_RIBBON
#include "wx/ribbon/gallery.h"
#include "wx/ribbon/art.h"
#include "wx/ribbon/bar.h"
#include "wx/dcbuffer.h"
#include "wx/clntdata.h"
#ifndef WX_PRECOMP
#endif
#ifdef __WXMSW__
#include "wx/msw/private.h"
#endif
wxDEFINE_EVENT(wxEVT_RIBBONGALLERY_HOVER_CHANGED, wxRibbonGalleryEvent);
wxDEFINE_EVENT(wxEVT_RIBBONGALLERY_SELECTED, wxRibbonGalleryEvent);
wxDEFINE_EVENT(wxEVT_RIBBONGALLERY_CLICKED, wxRibbonGalleryEvent);
IMPLEMENT_DYNAMIC_CLASS(wxRibbonGalleryEvent, wxCommandEvent)
IMPLEMENT_CLASS(wxRibbonGallery, wxRibbonControl)
class wxRibbonGalleryItem
{
public:
wxRibbonGalleryItem()
{
m_id = 0;
m_is_visible = false;
}
void SetId(int id) {m_id = id;}
void SetBitmap(const wxBitmap& bitmap) {m_bitmap = bitmap;}
const wxBitmap& GetBitmap() const {return m_bitmap;}
void SetIsVisible(bool visible) {m_is_visible = visible;}
void SetPosition(int x, int y, const wxSize& size)
{
m_position = wxRect(wxPoint(x, y), size);
}
bool IsVisible() const {return m_is_visible;}
const wxRect& GetPosition() const {return m_position;}
void SetClientObject(wxClientData *data) {m_client_data.SetClientObject(data);}
wxClientData *GetClientObject() const {return m_client_data.GetClientObject();}
void SetClientData(void *data) {m_client_data.SetClientData(data);}
void *GetClientData() const {return m_client_data.GetClientData();}
protected:
wxBitmap m_bitmap;
wxClientDataContainer m_client_data;
wxRect m_position;
int m_id;
bool m_is_visible;
};
BEGIN_EVENT_TABLE(wxRibbonGallery, wxRibbonControl)
EVT_ENTER_WINDOW(wxRibbonGallery::OnMouseEnter)
EVT_ERASE_BACKGROUND(wxRibbonGallery::OnEraseBackground)
EVT_LEAVE_WINDOW(wxRibbonGallery::OnMouseLeave)
EVT_LEFT_DOWN(wxRibbonGallery::OnMouseDown)
EVT_LEFT_UP(wxRibbonGallery::OnMouseUp)
EVT_LEFT_DCLICK(wxRibbonGallery::OnMouseDClick)
EVT_MOTION(wxRibbonGallery::OnMouseMove)
EVT_PAINT(wxRibbonGallery::OnPaint)
EVT_SIZE(wxRibbonGallery::OnSize)
END_EVENT_TABLE()
wxRibbonGallery::wxRibbonGallery()
{
}
wxRibbonGallery::wxRibbonGallery(wxWindow* parent,
wxWindowID id,
const wxPoint& pos,
const wxSize& size,
long style)
: wxRibbonControl(parent, id, pos, size, wxBORDER_NONE)
{
CommonInit(style);
}
wxRibbonGallery::~wxRibbonGallery()
{
Clear();
}
bool wxRibbonGallery::Create(wxWindow* parent,
wxWindowID id,
const wxPoint& pos,
const wxSize& size,
long style)
{
if(!wxRibbonControl::Create(parent, id, pos, size, wxBORDER_NONE))
{
return false;
}
CommonInit(style);
return true;
}
void wxRibbonGallery::CommonInit(long WXUNUSED(style))
{
m_selected_item = NULL;
m_hovered_item = NULL;
m_active_item = NULL;
m_scroll_up_button_rect = wxRect(0, 0, 0, 0);
m_scroll_down_button_rect = wxRect(0, 0, 0, 0);
m_extension_button_rect = wxRect(0, 0, 0, 0);
m_mouse_active_rect = NULL;
m_bitmap_size = wxSize(64, 32);
m_bitmap_padded_size = m_bitmap_size;
m_item_separation_x = 0;
m_item_separation_y = 0;
m_scroll_amount = 0;
m_scroll_limit = 0;
m_up_button_state = wxRIBBON_GALLERY_BUTTON_DISABLED;
m_down_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
m_extension_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
m_hovered = false;
SetBackgroundStyle(wxBG_STYLE_CUSTOM);
}
void wxRibbonGallery::OnMouseEnter(wxMouseEvent& evt)
{
m_hovered = true;
if(m_mouse_active_rect != NULL && !evt.LeftIsDown())
{
m_mouse_active_rect = NULL;
m_active_item = NULL;
}
Refresh(false);
}
void wxRibbonGallery::OnMouseMove(wxMouseEvent& evt)
{
bool refresh = false;
wxPoint pos = evt.GetPosition();
if(TestButtonHover(m_scroll_up_button_rect, pos, &m_up_button_state))
refresh = true;
if(TestButtonHover(m_scroll_down_button_rect, pos, &m_down_button_state))
refresh = true;
if(TestButtonHover(m_extension_button_rect, pos, &m_extension_button_state))
refresh = true;
wxRibbonGalleryItem *hovered_item = NULL;
wxRibbonGalleryItem *active_item = NULL;
if(m_client_rect.Contains(pos))
{
if(m_art && m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
pos.x += m_scroll_amount;
else
pos.y += m_scroll_amount;
size_t item_count = m_items.Count();
size_t item_i;
for(item_i = 0; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
if(!item->IsVisible())
continue;
if(item->GetPosition().Contains(pos))
{
if(m_mouse_active_rect == &item->GetPosition())
active_item = item;
hovered_item = item;
break;
}
}
}
if(active_item != m_active_item)
{
m_active_item = active_item;
refresh = true;
}
if(hovered_item != m_hovered_item)
{
m_hovered_item = hovered_item;
wxRibbonGalleryEvent notification(
wxEVT_RIBBONGALLERY_HOVER_CHANGED, GetId());
notification.SetEventObject(this);
notification.SetGallery(this);
notification.SetGalleryItem(hovered_item);
ProcessWindowEvent(notification);
refresh = true;
}
if(refresh)
Refresh(false);
}
bool wxRibbonGallery::TestButtonHover(const wxRect& rect, wxPoint pos,
wxRibbonGalleryButtonState* state)
{
if(*state == wxRIBBON_GALLERY_BUTTON_DISABLED)
return false;
wxRibbonGalleryButtonState new_state;
if(rect.Contains(pos))
{
if(m_mouse_active_rect == &rect)
new_state = wxRIBBON_GALLERY_BUTTON_ACTIVE;
else
new_state = wxRIBBON_GALLERY_BUTTON_HOVERED;
}
else
new_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(new_state != *state)
{
*state = new_state;
return true;
}
else
{
return false;
}
}
void wxRibbonGallery::OnMouseLeave(wxMouseEvent& WXUNUSED(evt))
{
m_hovered = false;
m_active_item = NULL;
if(m_up_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
m_up_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_down_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
m_down_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_extension_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
m_extension_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_hovered_item != NULL)
{
m_hovered_item = NULL;
wxRibbonGalleryEvent notification(
wxEVT_RIBBONGALLERY_HOVER_CHANGED, GetId());
notification.SetEventObject(this);
notification.SetGallery(this);
ProcessWindowEvent(notification);
}
Refresh(false);
}
void wxRibbonGallery::OnMouseDown(wxMouseEvent& evt)
{
wxPoint pos = evt.GetPosition();
m_mouse_active_rect = NULL;
if(m_client_rect.Contains(pos))
{
if(m_art && m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
pos.x += m_scroll_amount;
else
pos.y += m_scroll_amount;
size_t item_count = m_items.Count();
size_t item_i;
for(item_i = 0; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
if(!item->IsVisible())
continue;
const wxRect& rect = item->GetPosition();
if(rect.Contains(pos))
{
m_active_item = item;
m_mouse_active_rect = ▭
break;
}
}
}
else if(m_scroll_up_button_rect.Contains(pos))
{
if(m_up_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
{
m_mouse_active_rect = &m_scroll_up_button_rect;
m_up_button_state = wxRIBBON_GALLERY_BUTTON_ACTIVE;
}
}
else if(m_scroll_down_button_rect.Contains(pos))
{
if(m_down_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
{
m_mouse_active_rect = &m_scroll_down_button_rect;
m_down_button_state = wxRIBBON_GALLERY_BUTTON_ACTIVE;
}
}
else if(m_extension_button_rect.Contains(pos))
{
if(m_extension_button_state != wxRIBBON_GALLERY_BUTTON_DISABLED)
{
m_mouse_active_rect = &m_extension_button_rect;
m_extension_button_state = wxRIBBON_GALLERY_BUTTON_ACTIVE;
}
}
if(m_mouse_active_rect != NULL)
Refresh(false);
}
void wxRibbonGallery::OnMouseUp(wxMouseEvent& evt)
{
if(m_mouse_active_rect != NULL)
{
wxPoint pos = evt.GetPosition();
<|fim▁hole|> else
pos.y += m_scroll_amount;
}
if(m_mouse_active_rect->Contains(pos))
{
if(m_mouse_active_rect == &m_scroll_up_button_rect)
{
m_up_button_state = wxRIBBON_GALLERY_BUTTON_HOVERED;
ScrollLines(-1);
}
else if(m_mouse_active_rect == &m_scroll_down_button_rect)
{
m_down_button_state = wxRIBBON_GALLERY_BUTTON_HOVERED;
ScrollLines(1);
}
else if(m_mouse_active_rect == &m_extension_button_rect)
{
m_extension_button_state = wxRIBBON_GALLERY_BUTTON_HOVERED;
wxCommandEvent notification(wxEVT_BUTTON,
GetId());
notification.SetEventObject(this);
ProcessWindowEvent(notification);
}
else if(m_active_item != NULL)
{
if(m_selected_item != m_active_item)
{
m_selected_item = m_active_item;
wxRibbonGalleryEvent notification(
wxEVT_RIBBONGALLERY_SELECTED, GetId());
notification.SetEventObject(this);
notification.SetGallery(this);
notification.SetGalleryItem(m_selected_item);
ProcessWindowEvent(notification);
}
wxRibbonGalleryEvent notification(
wxEVT_RIBBONGALLERY_CLICKED, GetId());
notification.SetEventObject(this);
notification.SetGallery(this);
notification.SetGalleryItem(m_selected_item);
ProcessWindowEvent(notification);
}
}
m_mouse_active_rect = NULL;
m_active_item = NULL;
Refresh(false);
}
}
void wxRibbonGallery::OnMouseDClick(wxMouseEvent& evt)
{
// The 2nd click of a double-click should be handled as a click in the
// same way as the 1st click of the double-click. This is useful for
// scrolling through the gallery.
OnMouseDown(evt);
OnMouseUp(evt);
}
void wxRibbonGallery::SetItemClientObject(wxRibbonGalleryItem* itm,
wxClientData* data)
{
itm->SetClientObject(data);
}
wxClientData* wxRibbonGallery::GetItemClientObject(const wxRibbonGalleryItem* itm) const
{
return itm->GetClientObject();
}
void wxRibbonGallery::SetItemClientData(wxRibbonGalleryItem* itm, void* data)
{
itm->SetClientData(data);
}
void* wxRibbonGallery::GetItemClientData(const wxRibbonGalleryItem* itm) const
{
return itm->GetClientData();
}
bool wxRibbonGallery::ScrollLines(int lines)
{
if(m_scroll_limit == 0 || m_art == NULL)
return false;
return ScrollPixels(lines * GetScrollLineSize());
}
int wxRibbonGallery::GetScrollLineSize() const
{
if(m_art == NULL)
return 32;
int line_size = m_bitmap_padded_size.GetHeight();
if(m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
line_size = m_bitmap_padded_size.GetWidth();
return line_size;
}
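// Scrolling model used below: m_scroll_amount is kept clamped to
// [0, m_scroll_limit]; a positive pixel delta scrolls towards the end of
// the gallery, and each scroll button is disabled once its end of the
// range is reached.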
bool wxRibbonGallery::ScrollPixels(int pixels)
{
if(m_scroll_limit == 0 || m_art == NULL)
return false;
if(pixels < 0)
{
if(m_scroll_amount > 0)
{
m_scroll_amount += pixels;
if(m_scroll_amount <= 0)
{
m_scroll_amount = 0;
m_up_button_state = wxRIBBON_GALLERY_BUTTON_DISABLED;
}
else if(m_up_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_up_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_down_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_down_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
return true;
}
}
else if(pixels > 0)
{
if(m_scroll_amount < m_scroll_limit)
{
m_scroll_amount += pixels;
if(m_scroll_amount >= m_scroll_limit)
{
m_scroll_amount = m_scroll_limit;
m_down_button_state = wxRIBBON_GALLERY_BUTTON_DISABLED;
}
else if(m_down_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_down_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_up_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_up_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
return true;
}
}
return false;
}
void wxRibbonGallery::EnsureVisible(const wxRibbonGalleryItem* item)
{
if(item == NULL || !item->IsVisible() || IsEmpty())
return;
if(m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
{
int x = item->GetPosition().GetLeft();
int base_x = m_items.Item(0)->GetPosition().GetLeft();
int delta = x - base_x - m_scroll_amount;
ScrollLines(delta / m_bitmap_padded_size.GetWidth());
}
else
{
int y = item->GetPosition().GetTop();
int base_y = m_items.Item(0)->GetPosition().GetTop();
int delta = y - base_y - m_scroll_amount;
ScrollLines(delta / m_bitmap_padded_size.GetHeight());
}
}
bool wxRibbonGallery::IsHovered() const
{
return m_hovered;
}
void wxRibbonGallery::OnEraseBackground(wxEraseEvent& WXUNUSED(evt))
{
// All painting done in main paint handler to minimise flicker
}
void wxRibbonGallery::OnPaint(wxPaintEvent& WXUNUSED(evt))
{
wxAutoBufferedPaintDC dc(this);
if(m_art == NULL)
return;
m_art->DrawGalleryBackground(dc, this, GetSize());
int padding_top = m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_TOP_SIZE);
int padding_left = m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_LEFT_SIZE);
dc.SetClippingRegion(m_client_rect);
bool offset_vertical = true;
if(m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
offset_vertical = false;
size_t item_count = m_items.Count();
size_t item_i;
for(item_i = 0; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
if(!item->IsVisible())
continue;
const wxRect& pos = item->GetPosition();
wxRect offset_pos(pos);
if(offset_vertical)
offset_pos.SetTop(offset_pos.GetTop() - m_scroll_amount);
else
offset_pos.SetLeft(offset_pos.GetLeft() - m_scroll_amount);
m_art->DrawGalleryItemBackground(dc, this, offset_pos, item);
dc.DrawBitmap(item->GetBitmap(), offset_pos.GetLeft() + padding_left,
offset_pos.GetTop() + padding_top);
}
}
void wxRibbonGallery::OnSize(wxSizeEvent& WXUNUSED(evt))
{
Layout();
}
wxRibbonGalleryItem* wxRibbonGallery::Append(const wxBitmap& bitmap, int id)
{
wxASSERT(bitmap.IsOk());
if(m_items.IsEmpty())
{
m_bitmap_size = bitmap.GetSize();
CalculateMinSize();
}
else
{
wxASSERT(bitmap.GetSize() == m_bitmap_size);
}
wxRibbonGalleryItem *item = new wxRibbonGalleryItem;
item->SetId(id);
item->SetBitmap(bitmap);
m_items.Add(item);
return item;
}
wxRibbonGalleryItem* wxRibbonGallery::Append(const wxBitmap& bitmap, int id,
void* clientData)
{
wxRibbonGalleryItem *item = Append(bitmap, id);
item->SetClientData(clientData);
return item;
}
wxRibbonGalleryItem* wxRibbonGallery::Append(const wxBitmap& bitmap, int id,
wxClientData* clientData)
{
wxRibbonGalleryItem *item = Append(bitmap, id);
item->SetClientObject(clientData);
return item;
}
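// Typical (hypothetical) population of a gallery, assuming `panel` is the
// parent window and `some_bitmap` is a valid wxBitmap (all items must share
// one bitmap size):
//   wxRibbonGallery* gallery = new wxRibbonGallery(panel, wxID_ANY);
//   gallery->Append(some_bitmap, wxID_ANY);
//   gallery->Realize();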
void wxRibbonGallery::Clear()
{
size_t item_count = m_items.Count();
size_t item_i;
for(item_i = 0; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
delete item;
}
m_items.Clear();
}
bool wxRibbonGallery::IsSizingContinuous() const
{
return false;
}
void wxRibbonGallery::CalculateMinSize()
{
if(m_art == NULL || !m_bitmap_size.IsFullySpecified())
{
SetMinSize(wxSize(20, 20));
}
else
{
m_bitmap_padded_size = m_bitmap_size;
m_bitmap_padded_size.IncBy(
m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_LEFT_SIZE) +
m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_RIGHT_SIZE),
m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_TOP_SIZE) +
m_art->GetMetric(wxRIBBON_ART_GALLERY_BITMAP_PADDING_BOTTOM_SIZE));
wxMemoryDC dc;
SetMinSize(m_art->GetGallerySize(dc, this, m_bitmap_padded_size));
    // The best size is one that displays several items
m_best_size = m_bitmap_padded_size;
m_best_size.x *= 3;
m_best_size = m_art->GetGallerySize(dc, this, m_best_size);
}
}
bool wxRibbonGallery::Realize()
{
CalculateMinSize();
return Layout();
}
bool wxRibbonGallery::Layout()
{
if(m_art == NULL)
return false;
wxMemoryDC dc;
wxPoint origin;
wxSize client_size = m_art->GetGalleryClientSize(dc, this, GetSize(),
&origin, &m_scroll_up_button_rect, &m_scroll_down_button_rect,
&m_extension_button_rect);
m_client_rect = wxRect(origin, client_size);
int x_cursor = 0;
int y_cursor = 0;
size_t item_count = m_items.Count();
size_t item_i;
long art_flags = m_art->GetFlags();
for(item_i = 0; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
item->SetIsVisible(true);
if(art_flags & wxRIBBON_BAR_FLOW_VERTICAL)
{
if(y_cursor + m_bitmap_padded_size.y > client_size.GetHeight())
{
if(y_cursor == 0)
break;
y_cursor = 0;
x_cursor += m_bitmap_padded_size.x;
}
item->SetPosition(origin.x + x_cursor, origin.y + y_cursor,
m_bitmap_padded_size);
y_cursor += m_bitmap_padded_size.y;
}
else
{
if(x_cursor + m_bitmap_padded_size.x > client_size.GetWidth())
{
if(x_cursor == 0)
break;
x_cursor = 0;
y_cursor += m_bitmap_padded_size.y;
}
item->SetPosition(origin.x + x_cursor, origin.y + y_cursor,
m_bitmap_padded_size);
x_cursor += m_bitmap_padded_size.x;
}
}
for(; item_i < item_count; ++item_i)
{
wxRibbonGalleryItem *item = m_items.Item(item_i);
item->SetIsVisible(false);
}
if(art_flags & wxRIBBON_BAR_FLOW_VERTICAL)
m_scroll_limit = x_cursor;
else
m_scroll_limit = y_cursor;
if(m_scroll_amount >= m_scroll_limit)
{
m_scroll_amount = m_scroll_limit;
m_down_button_state = wxRIBBON_GALLERY_BUTTON_DISABLED;
}
else if(m_down_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_down_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
if(m_scroll_amount <= 0)
{
m_scroll_amount = 0;
m_up_button_state = wxRIBBON_GALLERY_BUTTON_DISABLED;
}
else if(m_up_button_state == wxRIBBON_GALLERY_BUTTON_DISABLED)
m_up_button_state = wxRIBBON_GALLERY_BUTTON_NORMAL;
return true;
}
wxSize wxRibbonGallery::DoGetBestSize() const
{
return m_best_size;
}
wxSize wxRibbonGallery::DoGetNextSmallerSize(wxOrientation direction,
wxSize relative_to) const
{
if(m_art == NULL)
return relative_to;
wxMemoryDC dc;
wxSize client = m_art->GetGalleryClientSize(dc, this, relative_to, NULL,
NULL, NULL, NULL);
switch(direction)
{
case wxHORIZONTAL:
client.DecBy(1, 0);
break;
case wxVERTICAL:
client.DecBy(0, 1);
break;
case wxBOTH:
client.DecBy(1, 1);
break;
}
if(client.GetWidth() < 0 || client.GetHeight() < 0)
return relative_to;
client.x = (client.x / m_bitmap_padded_size.x) * m_bitmap_padded_size.x;
client.y = (client.y / m_bitmap_padded_size.y) * m_bitmap_padded_size.y;
wxSize size = m_art->GetGallerySize(dc, this, client);
wxSize minimum = GetMinSize();
if(size.GetWidth() < minimum.GetWidth() ||
size.GetHeight() < minimum.GetHeight())
{
return relative_to;
}
switch(direction)
{
case wxHORIZONTAL:
size.SetHeight(relative_to.GetHeight());
break;
case wxVERTICAL:
size.SetWidth(relative_to.GetWidth());
break;
default:
break;
}
return size;
}
wxSize wxRibbonGallery::DoGetNextLargerSize(wxOrientation direction,
wxSize relative_to) const
{
if(m_art == NULL)
return relative_to;
wxMemoryDC dc;
wxSize client = m_art->GetGalleryClientSize(dc, this, relative_to, NULL,
NULL, NULL, NULL);
// No need to grow if the given size can already display every item
int nitems = (client.GetWidth() / m_bitmap_padded_size.x) *
(client.GetHeight() / m_bitmap_padded_size.y);
if(nitems >= (int)m_items.GetCount())
return relative_to;
switch(direction)
{
case wxHORIZONTAL:
client.IncBy(m_bitmap_padded_size.x, 0);
break;
case wxVERTICAL:
client.IncBy(0, m_bitmap_padded_size.y);
break;
case wxBOTH:
client.IncBy(m_bitmap_padded_size);
break;
}
client.x = (client.x / m_bitmap_padded_size.x) * m_bitmap_padded_size.x;
client.y = (client.y / m_bitmap_padded_size.y) * m_bitmap_padded_size.y;
wxSize size = m_art->GetGallerySize(dc, this, client);
wxSize minimum = GetMinSize();
if(size.GetWidth() < minimum.GetWidth() ||
size.GetHeight() < minimum.GetHeight())
{
return relative_to;
}
switch(direction)
{
case wxHORIZONTAL:
size.SetHeight(relative_to.GetHeight());
break;
case wxVERTICAL:
size.SetWidth(relative_to.GetWidth());
break;
default:
break;
}
return size;
}
bool wxRibbonGallery::IsEmpty() const
{
return m_items.IsEmpty();
}
unsigned int wxRibbonGallery::GetCount() const
{
return (unsigned int)m_items.GetCount();
}
wxRibbonGalleryItem* wxRibbonGallery::GetItem(unsigned int n)
{
if(n >= GetCount())
return NULL;
return m_items.Item(n);
}
void wxRibbonGallery::SetSelection(wxRibbonGalleryItem* item)
{
if(item != m_selected_item)
{
m_selected_item = item;
Refresh(false);
}
}
wxRibbonGalleryItem* wxRibbonGallery::GetSelection() const
{
return m_selected_item;
}
wxRibbonGalleryItem* wxRibbonGallery::GetHoveredItem() const
{
return m_hovered_item;
}
wxRibbonGalleryItem* wxRibbonGallery::GetActiveItem() const
{
return m_active_item;
}
wxRibbonGalleryButtonState wxRibbonGallery::GetUpButtonState() const
{
return m_up_button_state;
}
wxRibbonGalleryButtonState wxRibbonGallery::GetDownButtonState() const
{
return m_down_button_state;
}
wxRibbonGalleryButtonState wxRibbonGallery::GetExtensionButtonState() const
{
return m_extension_button_state;
}
#endif // wxUSE_RIBBON<|fim▁end|> | if(m_active_item)
{
if(m_art && m_art->GetFlags() & wxRIBBON_BAR_FLOW_VERTICAL)
pos.x += m_scroll_amount;
|
<|file_name|>EventFOV.java<|end_file_name|><|fim▁begin|>package ca.wescook.wateringcans.events;
import ca.wescook.wateringcans.potions.ModPotions;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraftforge.client.event.FOVUpdateEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class EventFOV {
@SubscribeEvent
public void fovUpdates(FOVUpdateEvent event) {
// Get player object
EntityPlayer player = event.getEntity();
if (player.getActivePotionEffect(ModPotions.inhibitFOV) != null) {<|fim▁hole|> float capableSpeed = player.capabilities.getWalkSpeed();
float fov = event.getFov();
            // Cancel the speed-based FOV zoom by dividing out the scale factor, (speed / walkSpeed + 1) / 2
event.setNewfov((float) (fov / ((playerSpeed / capableSpeed + 1.0) / 2.0)));
}
}
}<|fim▁end|> | // Get player data
double playerSpeed = player.getEntityAttribute(SharedMonsterAttributes.MOVEMENT_SPEED).getAttributeValue(); |
<|file_name|>documenttype.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DocumentTypeBinding;
use dom::bindings::codegen::Bindings::DocumentTypeBinding::DocumentTypeMethods;
use dom::bindings::codegen::InheritTypes::{DocumentTypeDerived, NodeCast};
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId};
use util::str::DOMString;
use std::borrow::ToOwned;
/// The `DOCTYPE` tag.
#[dom_struct]
pub struct DocumentType {
node: Node,
name: DOMString,
public_id: DOMString,
system_id: DOMString,
}
impl DocumentTypeDerived for EventTarget {<|fim▁hole|> fn is_documenttype(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::DocumentType)
}
}
impl DocumentType {
fn new_inherited(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> DocumentType {
DocumentType {
node: Node::new_inherited(NodeTypeId::DocumentType, document),
name: name,
public_id: public_id.unwrap_or("".to_owned()),
system_id: system_id.unwrap_or("".to_owned())
}
}
#[allow(unrooted_must_root)]
pub fn new(name: DOMString,
public_id: Option<DOMString>,
system_id: Option<DOMString>,
document: JSRef<Document>)
-> Temporary<DocumentType> {
let documenttype = DocumentType::new_inherited(name,
public_id,
system_id,
document);
Node::reflect_node(box documenttype, document, DocumentTypeBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a DOMString {
&self.name
}
#[inline]
pub fn public_id<'a>(&'a self) -> &'a DOMString {
&self.public_id
}
#[inline]
pub fn system_id<'a>(&'a self) -> &'a DOMString {
&self.system_id
}
}
impl<'a> DocumentTypeMethods for JSRef<'a, DocumentType> {
fn Name(self) -> DOMString {
self.name.clone()
}
fn PublicId(self) -> DOMString {
self.public_id.clone()
}
fn SystemId(self) -> DOMString {
self.system_id.clone()
}
// http://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(self) {
let node: JSRef<Node> = NodeCast::from_ref(self);
node.remove_self();
}
}<|fim▁end|> | |
<|file_name|>dashboard.ctrl.js<|end_file_name|><|fim▁begin|>(function(){
'use strict';
angular.module('GamemasterApp')
.controller('DashboardCtrl', function ($scope, $timeout, $mdSidenav, $http) {
$scope.users = ['Fabio', 'Leonardo', 'Thomas', 'Gabriele', 'Fabrizio', 'John', 'Luis', 'Kate', 'Max'];<|fim▁hole|> })
})();<|fim▁end|> | |
<|file_name|>generate.py<|end_file_name|><|fim▁begin|>import click
from .. import templates<|fim▁hole|> pass
@generate.command()
@click.argument('name')
@click.option('--path', required=True)
@click.pass_context
def blueprint(ctx, name, path):
app = ctx.obj
templates.extract_template(
'snippets/blueprint.py',
app.get_blueprint_directory().join(name + '.py'),
ctx={
'blueprint': {'name': name, 'path': path}})<|fim▁end|> |
@click.group()
def generate(): |
<|file_name|>about_loader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use file_loader;
use hyper::header::ContentType;
use hyper::http::RawStatus;
use hyper::mime::{Mime, SubLevel, TopLevel};
use mime_classifier::MIMEClassifier;<|fim▁hole|>use net_traits::{LoadConsumer, LoadData, Metadata};
use resource_task::{CancellationListener, send_error, start_sending_sniffed_opt};
use std::sync::Arc;
use url::Url;
use util::resource_files::resources_dir_path;
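// Dispatch for about: URLs: "about:blank" is synthesized entirely in memory,
// "about:crash" panics on purpose, and "about:failure"/"about:not-found" are
// rewritten to file:// URLs inside the resources directory and handed off to
// the file loader.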
pub fn factory(mut load_data: LoadData,
start_chan: LoadConsumer,
classifier: Arc<MIMEClassifier>,
cancel_listener: CancellationListener) {
let url = load_data.url.clone();
let non_relative_scheme_data = url.non_relative_scheme_data().unwrap();
match non_relative_scheme_data {
"blank" => {
let metadata = Metadata {
final_url: load_data.url,
content_type: Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, vec![]))),
charset: Some("utf-8".to_owned()),
headers: None,
status: Some(RawStatus(200, "OK".into())),
};
if let Ok(chan) = start_sending_sniffed_opt(start_chan,
metadata,
classifier,
&[],
load_data.context) {
let _ = chan.send(Done(Ok(())));
}
return
}
"crash" => panic!("Loading the about:crash URL."),
"failure" | "not-found" => {
let mut path = resources_dir_path();
let file_name = non_relative_scheme_data.to_owned() + ".html";
path.push(&file_name);
assert!(path.exists());
load_data.url = Url::from_file_path(&*path).unwrap();
}
_ => {
send_error(load_data.url, "Unknown about: URL.".to_owned(), start_chan);
return
}
};
file_loader::factory(load_data, start_chan, classifier, cancel_listener)
}<|fim▁end|> | use net_traits::ProgressMsg::Done; |
<|file_name|>pagelayout.py<|end_file_name|><|fim▁begin|>"""
PageLayout
==========
.. image:: images/pagelayout.gif
:align: right
The :class:`PageLayout` class is used to create a simple multi-page
layout, in a way that allows easy flipping from one page to another using
borders.
:class:`PageLayout` does not currently honor the
:attr:`~kivy.uix.widget.Widget.size_hint`,
:attr:`~kivy.uix.widget.Widget.size_hint_min`,
:attr:`~kivy.uix.widget.Widget.size_hint_max`, or
:attr:`~kivy.uix.widget.Widget.pos_hint` properties.
.. versionadded:: 1.8.0
Example:
.. code-block:: kv
PageLayout:
Button:
text: 'page1'
Button:
text: 'page2'
Button:
text: 'page3'
Transitions from one page to the next are made by swiping in from the border
areas on the right or left hand side. If you wish to display multiple widgets
in a page, we suggest you use a containing layout. Ideally, each page should
consist of a single :mod:`~kivy.uix.layout` widget that contains the remaining
widgets on that page.
"""
__all__ = ('PageLayout', )
from kivy.uix.layout import Layout
from kivy.properties import NumericProperty, DictProperty
from kivy.animation import Animation
class PageLayout(Layout):
'''PageLayout class. See module documentation for more information.
'''
page = NumericProperty(0)
'''The currently displayed page.
:data:`page` is a :class:`~kivy.properties.NumericProperty` and defaults
to 0.
'''
border = NumericProperty('50dp')
'''The width of the border around the current page used to display
the previous/next page swipe areas when needed.
:data:`border` is a :class:`~kivy.properties.NumericProperty` and
defaults to 50dp.
'''
swipe_threshold = NumericProperty(.5)
    '''The threshold used to trigger swipes, as a ratio of the widget
size.
:data:`swipe_threshold` is a :class:`~kivy.properties.NumericProperty`
and defaults to .5.
'''
anim_kwargs = DictProperty({'d': .5, 't': 'in_quad'})
    '''The animation kwargs used to construct the page-flip animation.
:data:`anim_kwargs` is a :class:`~kivy.properties.DictProperty`
and defaults to {'d': .5, 't': 'in_quad'}.
.. versionadded:: 1.11.0
'''
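    # For example, a snappier page flip can be requested per instance
    # (hypothetical values): PageLayout(anim_kwargs={'d': .2, 't': 'out_quad'})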
def __init__(self, **kwargs):
super(PageLayout, self).__init__(**kwargs)
trigger = self._trigger_layout
fbind = self.fbind
fbind('border', trigger)
fbind('page', trigger)
fbind('parent', trigger)
fbind('children', trigger)
fbind('size', trigger)
fbind('pos', trigger)
def do_layout(self, *largs):
l_children = len(self.children) - 1
h = self.height
x_parent, y_parent = self.pos
p = self.page
border = self.border
half_border = border / 2.
right = self.right
width = self.width - border
for i, c in enumerate(reversed(self.children)):
if i < p:
x = x_parent
elif i == p:
if not p: # it's first page
x = x_parent
elif p != l_children: # not first, but there are post pages
x = x_parent + half_border
else: # not first and there are no post pages
x = x_parent + border
elif i == p + 1:
if not p: # second page - no left margin
x = right - border
else: # there's already a left margin
x = right - half_border
else:
x = right
c.height = h
c.width = width
Animation(
x=x,
y=y_parent,
**self.anim_kwargs).start(c)
def on_touch_down(self, touch):
if (
self.disabled or
not self.collide_point(*touch.pos) or
not self.children
):
return
page = self.children[-self.page - 1]
if self.x <= touch.x < page.x:
touch.ud['page'] = 'previous'
touch.grab(self)
return True
elif page.right <= touch.x < self.right:
touch.ud['page'] = 'next'
touch.grab(self)
return True
return page.on_touch_down(touch)
def on_touch_move(self, touch):
if touch.grab_current != self:
return
p = self.page
border = self.border
half_border = border / 2.<|fim▁hole|> # move next page up to right edge
if p < len(self.children) - 1:
self.children[-p - 2].x = min(
self.right - self.border * (1 - (touch.sx - touch.osx)),
self.right)
# move current page until edge hits the right border
if p >= 1:
b_right = half_border if p > 1 else border
b_left = half_border if p < len(self.children) - 1 else border
self.children[-p - 1].x = max(min(
self.x + b_left + (touch.x - touch.ox),
self.right - b_right),
self.x + b_left)
# move previous page left edge up to left border
if p > 1:
self.children[-p].x = min(
self.x + half_border * (touch.sx - touch.osx),
self.x + half_border)
elif touch.ud['page'] == 'next':
# move current page up to left edge
if p >= 1:
self.children[-p - 1].x = max(
self.x + half_border * (1 - (touch.osx - touch.sx)),
self.x)
# move next page until its edge hit the left border
if p < len(self.children) - 1:
b_right = half_border if p >= 1 else border
b_left = half_border if p < len(self.children) - 2 else border
self.children[-p - 2].x = min(max(
self.right - b_right + (touch.x - touch.ox),
self.x + b_left),
self.right - b_right)
# move second next page up to right border
if p < len(self.children) - 2:
self.children[-p - 3].x = max(
self.right + half_border * (touch.sx - touch.osx),
self.right - half_border)
return page.on_touch_move(touch)
def on_touch_up(self, touch):
if touch.grab_current == self:
if (
touch.ud['page'] == 'previous' and
abs(touch.x - touch.ox) / self.width > self.swipe_threshold
):
self.page -= 1
elif (
touch.ud['page'] == 'next' and
abs(touch.x - touch.ox) / self.width > self.swipe_threshold
):
self.page += 1
else:
self._trigger_layout()
touch.ungrab(self)
if len(self.children) > 1:
return self.children[-self.page + 1].on_touch_up(touch)
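# Editorial note (not part of the original module): with the default
# swipe_threshold of .5, a horizontal drag covering more than half of the
# layout's width flips the page in on_touch_up; shorter drags snap back via
# _trigger_layout().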
if __name__ == '__main__':
from kivy.base import runTouchApp
from kivy.uix.button import Button
pl = PageLayout()
for i in range(1, 4):
b = Button(text='page%s' % i)
pl.add_widget(b)
runTouchApp(pl)<|fim▁end|> | page = self.children[-p - 1]
if touch.ud['page'] == 'previous': |
<|file_name|>fingerprint.rs<|end_file_name|><|fim▁begin|>use std::fs::{self, File, OpenOptions};
use std::io::prelude::*;
use std::io::{BufReader, SeekFrom};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use filetime::FileTime;
use core::{Package, Target, Profile};
use util;
use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError};
use super::Kind;
use super::job::Work;
use super::context::Context;
/// A tuple result of the `prepare_foo` functions in this module.
///
/// The first element of the triple is whether the target in question is
/// currently fresh or not, and the remaining two elements are the work to perform when
/// the target is dirty or fresh, respectively.
///
/// Both units of work are always generated because a fresh package may still be
/// rebuilt if some upstream dependency changes.
pub type Preparation = (Freshness, Work, Work);
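// Illustrative sketch (editorial, not in the original source): a caller could
// consume a `Preparation` by queuing whichever unit of work matches the
// computed freshness (`queue` is a hypothetical job queue):
//
// let (freshness, dirty, fresh) = try!(prepare_target(cx, pkg, target, profile, kind));
// match freshness {
//     Dirty => queue.enqueue(dirty),
//     Fresh => queue.enqueue(fresh),
// }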
/// Prepare the necessary work for the fingerprint for a specific target.
///
/// When dealing with fingerprints, cargo gets to choose what granularity
/// "freshness" is considered at. One option is considering freshness at the
/// package level. This means that if anything in a package changes, the entire
/// package is rebuilt, unconditionally. This simplicity comes at a cost,
/// however, in that test-only changes will cause libraries to be rebuilt, which
/// is quite unfortunate!
///
/// The cost was deemed high enough that fingerprints are now calculated at the
/// layer of a target rather than a package. Each target can then be kept track
/// of separately and only rebuilt as necessary. This requires cargo to
/// understand what the inputs are to a target, so we drive rustc with the
/// --dep-info flag to learn about all input files to a unit of compilation.
///
/// This function will calculate the fingerprint for a target and prepare the
/// work necessary to either write the fingerprint or copy over all fresh files
/// from the old directories to their new locations.
pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
pkg: &'a Package,
target: &'a Target,
profile: &'a Profile,
kind: Kind) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint: {} / {}",
pkg.package_id(), target.name()));<|fim▁hole|>
let mut fingerprint = try!(calculate(cx, pkg, target, profile, kind));
let is_fresh = try!(is_fresh(&loc, &mut fingerprint));
let root = cx.out_dir(pkg, kind, target);
let mut missing_outputs = false;
if !profile.doc {
for filename in try!(cx.target_filenames(pkg, target, profile,
kind)).iter() {
missing_outputs |= fs::metadata(root.join(filename)).is_err();
}
}
let allow_failure = profile.rustc_args.is_some();
Ok(prepare(is_fresh && !missing_outputs, allow_failure, loc, fingerprint))
}
/// A fingerprint can be considered to be a "short string" representing the
/// state of the world for a package.
///
/// If a fingerprint ever changes, then the package itself needs to be
/// recompiled. Inputs to the fingerprint include source code modifications,
/// compiler flags, compiler version, etc. This structure is not simply a
/// `String` due to the fact that some fingerprints cannot be calculated lazily.
///
/// Path sources, for example, use the mtime of the corresponding dep-info file
/// as a fingerprint (all source files must be modified *before* this mtime).
/// This dep-info file is not generated, however, until after the crate is
/// compiled. As a result, this structure can be thought of as a fingerprint
/// to-be. The actual value can be calculated via `resolve()`, but the operation
/// may fail as some files may not have been generated.
///
/// Note that dependencies are taken into account for fingerprints because rustc
/// requires that whenever an upstream crate is recompiled that all downstream
/// dependants are also recompiled. This is typically tracked through
/// `DependencyQueue`, but it also needs to be retained here because Cargo can
/// be interrupted while executing, losing the state of the `DependencyQueue`
/// graph.
pub type Fingerprint = Arc<FingerprintInner>;
struct FingerprintInner {
extra: String,
deps: Vec<Fingerprint>,
local: LocalFingerprint,
resolved: Mutex<Option<String>>,
}
#[derive(Clone)]
enum LocalFingerprint {
Precalculated(String),
MtimeBased(Option<FileTime>, PathBuf),
}
impl FingerprintInner {
fn resolve(&self, force: bool) -> CargoResult<String> {
if !force {
if let Some(ref s) = *self.resolved.lock().unwrap() {
return Ok(s.clone())
}
}
let mut deps: Vec<_> = try!(self.deps.iter().map(|s| {
s.resolve(force)
}).collect());
deps.sort();
let known = match self.local {
LocalFingerprint::Precalculated(ref s) => s.clone(),
LocalFingerprint::MtimeBased(Some(n), _) if !force => n.to_string(),
LocalFingerprint::MtimeBased(_, ref p) => {
debug!("resolving: {}", p.display());
let meta = try!(fs::metadata(p));
FileTime::from_last_modification_time(&meta).to_string()
}
};
let resolved = util::short_hash(&(&known, &self.extra, &deps));
debug!("inputs: {} {} {:?} => {}", known, self.extra, deps, resolved);
*self.resolved.lock().unwrap() = Some(resolved.clone());
Ok(resolved)
}
}
/// Calculates the fingerprint for a package/target pair.
///
/// This fingerprint is used by Cargo to detect when any of the following changes:
///
/// * A non-path package changes (changes version, changes revision, etc).
/// * Any dependency changes
/// * The compiler changes
/// * The set of features a package is built with changes
/// * The profile a target is compiled with changes (e.g. opt-level changes)
///
/// Information like file modification time is only calculated for path
/// dependencies and is calculated in `calculate_target_fresh`.
fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
pkg: &'a Package,
target: &'a Target,
profile: &'a Profile,
kind: Kind)
-> CargoResult<Fingerprint> {
let key = (pkg.package_id(), target, profile, kind);
match cx.fingerprints.get(&key) {
Some(s) => return Ok(s.clone()),
None => {}
}
// First, calculate all statically known "salt data" such as the profile
// information (compiler flags), the compiler version, activated features,
// and target configuration.
let features = cx.resolve.features(pkg.package_id());
let features = features.map(|s| {
let mut v = s.iter().collect::<Vec<&String>>();
v.sort();
v
});
let extra = util::short_hash(&(cx.config.rustc_version(), target, &features,
profile));
debug!("extra {:?} {:?} {:?} = {}", target, profile, features, extra);
// Next, recursively calculate the fingerprint for all of our dependencies.
//
// Skip the fingerprints of build scripts as they may not always be
// available and the dirtiness propagation for modification is tracked
// elsewhere. Also skip fingerprints of binaries because they don't actually
// induce a recompile, they're just dependencies in the sense that they need
// to be built.
let deps = try!(cx.dep_targets(pkg, target, profile).into_iter()
.filter(|&(_, t, _)| !t.is_custom_build() && !t.is_bin())
.map(|(pkg, target, profile)| {
let kind = match kind {
Kind::Host => Kind::Host,
Kind::Target if target.for_host() => Kind::Host,
Kind::Target => Kind::Target,
};
calculate(cx, pkg, target, profile, kind)
}).collect::<CargoResult<Vec<_>>>());
// And finally, calculate what our own local fingerprint is
let local = if use_dep_info(pkg, profile) {
let dep_info = dep_info_loc(cx, pkg, target, profile, kind);
let mtime = try!(calculate_target_mtime(&dep_info));
// if the mtime listed is not fresh, then remove the `dep_info` file to
// ensure that future calls to `resolve()` won't work.
if mtime.is_none() {
let _ = fs::remove_file(&dep_info);
}
LocalFingerprint::MtimeBased(mtime, dep_info)
} else {
LocalFingerprint::Precalculated(try!(calculate_pkg_fingerprint(cx, pkg)))
};
let fingerprint = Arc::new(FingerprintInner {
extra: extra,
deps: deps,
local: local,
resolved: Mutex::new(None),
});
cx.fingerprints.insert(key, fingerprint.clone());
Ok(fingerprint)
}
// We want to use the mtime for files if we're a path source, but if we're a
// git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the
// responsibility of the source)
fn use_dep_info(pkg: &Package, profile: &Profile) -> bool {
let path = pkg.summary().source_id().is_path();
!profile.doc && path
}
/// Prepare the necessary work for the fingerprint of a build command.
///
/// Build commands are located on packages, not on targets. Additionally, we
/// don't have --dep-info to drive calculation of the fingerprint of a build
/// command. This brings up an interesting predicament which gives us a few
/// options to figure out whether a build command is dirty or not:
///
/// 1. A build command is dirty if *any* file in a package changes. In theory
/// all files are candidates for being used by the build command.
/// 2. A build command is dirty if any file in a *specific directory* changes.
/// This may lose information as it may require files outside of the specific
/// directory.
/// 3. A build command must itself provide a dep-info-like file stating how it
/// should be considered dirty or not.
///
/// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future.
pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind)
-> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}",
pkg.package_id()));
let new = dir(cx, pkg, kind);
let loc = new.join("build");
info!("fingerprint at: {}", loc.display());
let new_fingerprint = try!(calculate_build_cmd_fingerprint(cx, pkg));
let new_fingerprint = Arc::new(FingerprintInner {
extra: String::new(),
deps: Vec::new(),
local: LocalFingerprint::Precalculated(new_fingerprint),
resolved: Mutex::new(None),
});
let is_fresh = try!(is_fresh(&loc, &new_fingerprint));
Ok(prepare(is_fresh, false, loc, new_fingerprint))
}
/// Prepare work for when a package starts to build
pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind)
-> (Work, Work) {
let new1 = dir(cx, pkg, kind);
let new2 = new1.clone();
let work1 = Work::new(move |_| {
if fs::metadata(&new1).is_err() {
try!(fs::create_dir(&new1));
}
Ok(())
});
let work2 = Work::new(move |_| {
if fs::metadata(&new2).is_err() {
try!(fs::create_dir(&new2));
}
Ok(())
});
(work1, work2)
}
/// Given the data to build and write a fingerprint, generate some Work
/// instances to actually perform the necessary work.
fn prepare(is_fresh: bool,
allow_failure: bool,
loc: PathBuf,
fingerprint: Fingerprint) -> Preparation {
let write_fingerprint = Work::new(move |_| {
debug!("write fingerprint: {}", loc.display());
let fingerprint = fingerprint.resolve(true).chain_error(|| {
internal("failed to resolve a pending fingerprint")
});
let fingerprint = match fingerprint {
Ok(f) => f,
Err(..) if allow_failure => return Ok(()),
Err(e) => return Err(e),
};
let mut f = try!(File::create(&loc));
try!(f.write_all(fingerprint.as_bytes()));
Ok(())
});
(if is_fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop())
}
/// Return the location for fingerprints for a package
pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> PathBuf {
cx.layout(pkg, kind).proxy().fingerprint(pkg)
}
/// Returns the location for the dep info file of a target.
pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target,
profile: &Profile, kind: Kind) -> PathBuf {
dir(cx, pkg, kind).join(&format!("dep-{}", filename(target, profile)))
}
fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<bool> {
let mut file = match File::open(loc) {
Ok(file) => file,
Err(..) => return Ok(false),
};
let mut old_fingerprint = String::new();
try!(file.read_to_string(&mut old_fingerprint));
let new_fingerprint = match new_fingerprint.resolve(false) {
Ok(s) => s,
Err(..) => return Ok(false),
};
trace!("old fingerprint: {}", old_fingerprint);
trace!("new fingerprint: {}", new_fingerprint);
Ok(old_fingerprint == new_fingerprint)
}
fn calculate_target_mtime(dep_info: &Path) -> CargoResult<Option<FileTime>> {
macro_rules! fs_try {
($e:expr) => (match $e { Ok(e) => e, Err(..) => return Ok(None) })
}
let mut f = BufReader::new(fs_try!(File::open(dep_info)));
// see comments in append_current_dir for where this cwd is manifested from.
let mut cwd = Vec::new();
fs_try!(f.read_until(0, &mut cwd));
let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1]));
let line = match f.lines().next() {
Some(Ok(line)) => line,
_ => return Ok(None),
};
let meta = try!(fs::metadata(&dep_info));
let mtime = FileTime::from_last_modification_time(&meta);
let pos = try!(line.find(": ").chain_error(|| {
internal(format!("dep-info not in an understood format: {}",
dep_info.display()))
}));
let deps = &line[pos + 2..];
let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty());
loop {
let mut file = match deps.next() {
Some(s) => s.to_string(),
None => break,
};
while file.ends_with("\\") {
file.pop();
file.push(' ');
file.push_str(deps.next().unwrap())
}
let meta = match fs::metadata(cwd.join(&file)) {
Ok(meta) => meta,
Err(..) => { info!("stale: {} -- missing", file); return Ok(None) }
};
let file_mtime = FileTime::from_last_modification_time(&meta);
if file_mtime > mtime {
info!("stale: {} -- {} vs {}", file, file_mtime, mtime);
return Ok(None)
}
}
Ok(Some(mtime))
}
fn calculate_build_cmd_fingerprint(cx: &Context, pkg: &Package)
-> CargoResult<String> {
// TODO: this should be scoped to just the `build` directory, not the entire
// package.
calculate_pkg_fingerprint(cx, pkg)
}
fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
let source = cx.sources
.get(pkg.package_id().source_id())
.expect("BUG: Missing package source");
source.fingerprint(pkg)
}
fn filename(target: &Target, profile: &Profile) -> String {
let kind = if target.is_lib() {"lib"} else {"bin"};
let flavor = if target.is_test() || profile.test {
"test-"
} else if profile.doc {
"doc-"
} else {
""
};
format!("{}{}-{}", flavor, kind, target.name())
}
// The dep-info files emitted by the compiler all have their listed paths
// relative to whatever the current directory was at the time that the compiler
// was invoked. As the current directory may change over time, we need to record
// what that directory was at the beginning of the file so we can know about it
// next time.
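// Editorial note: after `append_current_dir` below runs, a dep-info file has
// the layout `<cwd bytes>\0<original dep-info contents>`; `calculate_target_mtime`
// above reads up to the first NUL byte to recover that cwd.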
pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> {
debug!("appending {} <- {}", path.display(), cwd.display());
let mut f = try!(OpenOptions::new().read(true).write(true).open(path));
let mut contents = Vec::new();
try!(f.read_to_end(&mut contents));
try!(f.seek(SeekFrom::Start(0)));
try!(f.write_all(try!(util::path2bytes(cwd))));
try!(f.write_all(&[0]));
try!(f.write_all(&contents));
Ok(())
}<|fim▁end|> | let new = dir(cx, pkg, kind);
let loc = new.join(&filename(target, profile));
info!("fingerprint at: {}", loc.display()); |
<|file_name|>exception_ex.py<|end_file_name|><|fim▁begin|>from cinder.exception import *
from cinder.i18n import _
class ProviderMultiVolumeError(CinderException):
msg_fmt = _("volume %(volume_id)s More than one provider_volume are found")
<|fim▁hole|>class ProviderMultiSnapshotError(CinderException):
msg_fmt = _("snapshot %(snapshot_id)s More than one provider_snapshot are found")
class ProviderCreateVolumeError(CinderException):
msg_fmt = _("volume %(volume_id)s create request failed,network or provider internal error")
class ProviderCreateSnapshotError(CinderException):
msg_fmt = _("snapshot %(snapshot_id)s create request failed,network or provider internal error")
class ProviderLocationError(CinderException):
msg_fmt = _("provider location error")
class ProviderExportVolumeError(CinderException):
msg_fmt = _("provider export volume error")
class ProviderVolumeNotFound(NotFound):
message = _("Volume %(volume_id)s could not be found.")
class VgwHostNotFound(NotFound):
message = _("node of %(Vgw_id)s at provider cloud could not be found.")<|fim▁end|> | |
<|file_name|>logging.go<|end_file_name|><|fim▁begin|>package event<|fim▁hole|>import (
"time"
"github.com/go-kit/kit/log"
)
type logService struct {
logger log.Logger
next Service
}
// LogServiceMiddleware given a Logger wraps the next Service with logging capabilities.
func LogServiceMiddleware(logger log.Logger, store string) ServiceMiddleware {
return func(next Service) Service {
logger = log.With(
logger,
"service", "event",
"store", store,
)
return &logService{logger: logger, next: next}
}
}
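// Illustrative usage sketch (editorial; `newEventService` is a hypothetical
// constructor for a concrete Service implementation):
//
//	var svc Service = newEventService(db)
//	svc = LogServiceMiddleware(logger, "postgres")(svc)
//	count, err := svc.Count("orders", QueryOptions{})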
func (s *logService) Count(ns string, opts QueryOptions) (count int, err error) {
defer func(begin time.Time) {
ps := []interface{}{
"count", count,
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Count",
"namespace", ns,
"opts", opts,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Count(ns, opts)
}
func (s *logService) Put(ns string, input *Event) (output *Event, err error) {
defer func(begin time.Time) {
ps := []interface{}{
"duration_ns", time.Since(begin).Nanoseconds(),
"input", input,
"method", "Put",
"namespace", ns,
"output", output,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Put(ns, input)
}
func (s *logService) Query(ns string, opts QueryOptions) (list List, err error) {
defer func(begin time.Time) {
ps := []interface{}{
"datapoints", len(list),
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Query",
"namespace", ns,
"opts", opts,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Query(ns, opts)
}
func (s *logService) Setup(ns string) (err error) {
defer func(begin time.Time) {
ps := []interface{}{
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Setup",
"namespace", ns,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Setup(ns)
}
func (s *logService) Teardown(ns string) (err error) {
defer func(begin time.Time) {
ps := []interface{}{
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Teardown",
"namespace", ns,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Teardown(ns)
}
type logSource struct {
logger log.Logger
next Source
}
// LogSourceMiddleware given a Logger wraps the next Source with logging capabilities.
func LogSourceMiddleware(store string, logger log.Logger) SourceMiddleware {
return func(next Source) Source {
logger = log.With(
logger,
"source", "event",
"store", store,
)
return &logSource{
logger: logger,
next: next,
}
}
}
func (s *logSource) Ack(id string) (err error) {
defer func(begin time.Time) {
ps := []interface{}{
"ack_id", id,
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Ack",
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Ack(id)
}
func (s *logSource) Consume() (change *StateChange, err error) {
defer func(begin time.Time) {
ps := []interface{}{
"duration_ns", time.Since(begin).Nanoseconds(),
"method", "Consume",
}
if change != nil {
ps = append(ps,
"namespace", change.Namespace,
"event_new", change.New,
"event_old", change.Old,
)
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Consume()
}
func (s *logSource) Propagate(ns string, old, new *Event) (id string, err error) {
defer func(begin time.Time) {
ps := []interface{}{
"duration_ns", time.Since(begin).Nanoseconds(),
"id", id,
"method", "Propagate",
"namespace", ns,
"event_new", new,
"event_old", old,
}
if err != nil {
ps = append(ps, "err", err)
}
_ = s.logger.Log(ps...)
}(time.Now())
return s.next.Propagate(ns, old, new)
}<|fim▁end|> | |
<|file_name|>IFrameNavigator.js<|end_file_name|><|fim▁begin|>// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
import { Log } from './Log.js';
import { IFrameWindow } from './IFrameWindow.js';
export class IFrameNavigator {
prepare(params) {
let frame = new IFrameWindow(params);
return Promise.resolve(frame);
}
callback(url) {
Log.debug("IFrameNavigator.callback");
try {
IFrameWindow.notifyParent(url);
return Promise.resolve();
}<|fim▁hole|> catch (e) {
return Promise.reject(e);
}
}
}<|fim▁end|> | |
<|file_name|>cell-command-standard.py<|end_file_name|><|fim▁begin|>import sys, os, subprocess, tempfile, shlex, glob
result = None
d = None
def format_msg(message, headline):
msg = "Line {0}:\n {1}\n{2}:\n{3}"\
.format(PARAMS["lineno"], PARAMS["source"], headline, message)
return msg
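# Illustrative example (editorial): with PARAMS = {"lineno": 3, "source": "ls"},
# format_msg("oops", "Error message") renders as
# "Line 3:\n ls\nError message:\noops".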
try:
#print("RUN", PARAMS["source"])
d = tempfile.mkdtemp(dir="/dev/shm")
command_params = []
created_files = []
for ref in PARAMS["refs"]:
if isinstance(ref, str):
value = globals()[ref]
inp_type = PARAMS["inputs"][ref]
if inp_type == "variable":
command_params.append(shlex.quote(value))
elif inp_type == "doc":
filename = d + "/doc_" + ref
open(filename, "w").write(value)
created_files.append(filename)
command_params.append(filename)
else:
raise TypeError(inp_type)
else:
typ, value = ref["type"], ref["value"]
if typ == "env":
command_params.append(value)
elif typ == "file":
if value is None:
filename = "/dev/null"
else:
value = os.path.expanduser(value)
filename = os.path.abspath(value)
command_params.append(filename)
elif typ == "varexp":
refs = ref["refs"]
ref_values = []
for r in refs:
if not r.startswith("$"):
v = globals()[r]
else: #env variable
v = os.environ[r[1:]]
ref_values.append(v)
value = value.format(*ref_values)
command_params.append(shlex.quote(value))
else:
raise TypeError(typ)
command = [param.format(*command_params) \
for param in PARAMS["command"]]
stdout = None
stderr = subprocess.PIPE
capture = False
return_mode = []
print_stdout = True
print_stderr = True
for output in PARAMS["output_refs"]:
if output["type"] == "stdout":
stdout = subprocess.PIPE
stderr = subprocess.PIPE
print_stdout = False
if output["name"] is not None:
return_mode.append("stdout")
elif output["type"] == "stderr":
stdout = subprocess.PIPE
stderr = subprocess.PIPE
print_stderr = False
if output["name"] is not None:
return_mode.append("stderr")
elif output["type"] == "stdout+stderr":
stdout = subprocess.PIPE
stderr = subprocess.STDOUT
print_stdout = False
print_stderr = False
if output["name"] is not None:
return_mode.append("stdout")
elif output["type"] == "capture":
capture = True
return_mode.append("capture")
else:
raise TypeError(output["type"])
command = "cd %s;" % d + " ".join(command)
pragma = PARAMS.get("pragma", [])
monitor_delay = 2
monitor_preliminary = False
if "monitor" in pragma:
monitor_preliminary = True
monitor_delay = pragma[pragma.index("monitor")+1]
assert len(return_mode) <= 1, return_mode #TODO: stdout and stderr to different targets => return JSON
return_mode = return_mode[0] if len(return_mode) else None #TODO, see above
process = subprocess.Popen(command, stdout=stdout, stderr=stderr, shell=True)
last_stdout_data = b""
last_stderr_data = b""
while 1:
#print("MONITOR!")
try:
stdout_data, stderr_data = process.communicate(timeout=monitor_delay)
finished = True
except subprocess.TimeoutExpired:
finished = False
#TODO return_mode, see above
#dirty! but I don't know how to do it better
stdout = process._fileobj2output[process.stdout]
curr_stdout_data = b''.join(stdout).decode("utf-8")
if len(curr_stdout_data) and \
curr_stdout_data != last_stdout_data:
if return_mode == "stdout" and process.stdout:
if monitor_preliminary:
return_preliminary(curr_stdout_data)
else:
sys.stdout.write(curr_stdout_data[len(last_stdout_data):])
last_stdout_data = curr_stdout_data
stderr = process._fileobj2output[process.stderr]
curr_stderr_data = b''.join(stderr).decode("utf-8")
if len(curr_stderr_data) and \
curr_stderr_data != last_stderr_data:<|fim▁hole|> else:
sys.stderr.write(curr_stderr_data[len(last_stderr_data):])
last_stderr_data = curr_stderr_data
if finished:
break
if stdout_data is not None:
stdout_data = stdout_data.decode("utf-8")
if stderr_data is not None:
stderr_data = stderr_data.decode("utf-8")
if process.returncode:
message = "Process exited with return code %d\n" % process.returncode
message += "Standard error:\n%s" % stderr_data
msg = format_msg(message, "Error message")
raise Exception(msg)
else:
if print_stdout and stdout_data is not None and len(stdout_data):
print(format_msg(stdout_data, "Standard output"))
#if print_stderr and len(stderr_data):
# print(format_msg(stderr_data, "Standard error"))
if capture:
new_files = []
for dirpath, dirnames, filenames in os.walk(d):
for filename in filenames:
new_file = os.path.join(dirpath, filename)
if new_file not in created_files:
new_files.append(new_file)
capture_data = {}
for f in new_files:
ff = f[len(d+"/"):]
capture_data[ff] = open(f).read()
#TODO return_mode, see above
if return_mode == "stdout":
result = stdout_data
elif return_mode == "stderr":
result = stderr_data
elif return_mode == "capture":
result = capture_data
finally:
if d is not None:
os.system("rm -rf %s" % d)
return result<|fim▁end|> | if return_mode == "stderr" and process.stderr:
if monitor_preliminary:
return_preliminary(curr_stderr_data) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Declaration of toolchains.linalg namespace.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from pkgutil import extend_path
# we're not the only ones in this namespace<|fim▁hole|>__path__ = extend_path(__path__, __name__) #@ReservedAssignment<|fim▁end|> | |
<|file_name|>binnode.rs<|end_file_name|><|fim▁begin|>use std::collections::Bitv;
use BinGraph;
/// Contains a binary state and the choices.
#[deriving(PartialEq, Eq, Show, Clone)]
pub struct BinNode {
/// The state composed of bits.
pub state: Bitv,
/// The choices represented as bits that can be flipped.
pub choices: Bitv
}
impl BinNode {
/// Creates a `BinNode` from pairs of booleans.
pub fn from_pairs(pairs: &[(bool, bool)]) -> BinNode {
BinNode {
state: pairs.iter().map(|&(a, _)| a).collect(),
choices: pairs.iter().map(|&(_, b)| b).collect()
}<|fim▁hole|> #[inline(always)]
pub fn with_choices(&self, f: |i: uint|) {
for i in range(0, self.choices.len())
.zip(self.choices.iter())
.filter(|&(_, v)| v)
.map(|(i, _)| i
) {
f(i)
}
}
/// Calls closure for all choices that are not in graph.
#[inline(always)]
pub fn with_choices_not_in<TAction>(
&self,
graph: &BinGraph<TAction>,
f: |i: uint|
) {
self.with_choices(|i| if !graph.contains_choice(self, i) {
f(i)
})
}
}<|fim▁end|> | }
/// Call closure for each available choice. |
<|file_name|>ReadableList.java<|end_file_name|><|fim▁begin|>package com.salesmanager.shop.model.entity;
import java.io.Serializable;
public abstract class ReadableList implements Serializable {
/**
*
*/<|fim▁hole|> private int number;//number of record in current page
private long recordsTotal;//total number of records in db
private int recordsFiltered;
public int getTotalPages() {
return totalPages;
}
public void setTotalPages(int totalCount) {
this.totalPages = totalCount;
}
public long getRecordsTotal() {
return recordsTotal;
}
public void setRecordsTotal(long recordsTotal) {
this.recordsTotal = recordsTotal;
}
public int getRecordsFiltered() {
return recordsFiltered;
}
public void setRecordsFiltered(int recordsFiltered) {
this.recordsFiltered = recordsFiltered;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
}<|fim▁end|> | private static final long serialVersionUID = 1L;
private int totalPages;//totalPages |
<|file_name|>inheritanceTest.py<|end_file_name|><|fim▁begin|>__author__ = 'http://www.python-course.eu/python3_inheritance.php'
class Person:
def __init__(self, first, last):
self.firstname = first
self.lastname = last
def Name(self):<|fim▁hole|>
class Employee(Person):
def __init__(self, first, last, staffnum):
Person.__init__(self,first, last)
self.staffnumber = staffnum
def GetEmployee(self):
return self.Name() + ", " + self.staffnumber
x = Person("Marge", "Simpson")
y = Employee("Homer", "Simpson", "1007")
print(x.Name())
print(y.GetEmployee())<|fim▁end|> | return self.firstname + " " + self.lastname |
<|file_name|>syntax_definition.rs<|end_file_name|><|fim▁begin|>//! Data structures for representing syntax definitions
//!
//! Everything here is public because I want this library to be useful in super integrated cases
//! like text editors and I have no idea what kind of monkeying you might want to do with the data.
//! Perhaps parsing your own syntax format into this data structure?
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use super::scope::*;
use super::regex::{Regex, Region};
use regex_syntax::escape;
use serde::{Serialize, Serializer};
use crate::parsing::syntax_set::SyntaxSet;
pub type CaptureMapping = Vec<(usize, Vec<Scope>)>;
/// An opaque ID for a [`Context`].
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct ContextId {
/// Index into [`SyntaxSet::syntaxes`]
pub(crate) syntax_index: usize,
/// Index into [`crate::parsing::LazyContexts::contexts`] for the [`Self::syntax_index`] syntax
pub(crate) context_index: usize,
}
/// The main data structure representing a syntax definition loaded from a
/// `.sublime-syntax` file
///
/// You'll probably only need these as references to be passed around to parsing code.
///
/// Some useful public fields are the `name` field which is a human readable name to display in
/// syntax lists, and the `hidden` field which means hide this syntax from any lists because it is
/// for internal use.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct SyntaxDefinition {
pub name: String,
pub file_extensions: Vec<String>,
pub scope: Scope,
pub first_line_match: Option<String>,
pub hidden: bool,
#[serde(serialize_with = "ordered_map")]
pub variables: HashMap<String, String>,
#[serde(serialize_with = "ordered_map")]
pub contexts: HashMap<String, Context>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct Context {
pub meta_scope: Vec<Scope>,
pub meta_content_scope: Vec<Scope>,
/// This being set false in the syntax file implies this field being set false,
/// but it can also be set false for contexts that don't include the prototype for other reasons
pub meta_include_prototype: bool,
pub clear_scopes: Option<ClearAmount>,
/// This is filled in by the linker at link time
/// for contexts that have `meta_include_prototype==true`
/// and are not included from the prototype.
pub prototype: Option<ContextId>,
pub uses_backrefs: bool,
pub patterns: Vec<Pattern>,
}
impl Context {
pub fn new(meta_include_prototype: bool) -> Context {
Context {
meta_scope: Vec::new(),
meta_content_scope: Vec::new(),
meta_include_prototype,
clear_scopes: None,
uses_backrefs: false,
patterns: Vec::new(),
prototype: None,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum Pattern {
Match(MatchPattern),
Include(ContextReference),
}
/// Used to iterate over all the match patterns in a context
///
/// Basically walks the tree of patterns and include directives in the correct order.
#[derive(Debug)]
pub struct MatchIter<'a> {
syntax_set: &'a SyntaxSet,
ctx_stack: Vec<&'a Context>,
index_stack: Vec<usize>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct MatchPattern {
pub has_captures: bool,
pub regex: Regex,
pub scope: Vec<Scope>,
pub captures: Option<CaptureMapping>,
pub operation: MatchOperation,
pub with_prototype: Option<ContextReference>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[non_exhaustive]
pub enum ContextReference {
#[non_exhaustive]
Named(String),
#[non_exhaustive]
ByScope {
scope: Scope,
sub_context: Option<String>,
},
#[non_exhaustive]
File {
name: String,
sub_context: Option<String>,
},
#[non_exhaustive]
Inline(String),
#[non_exhaustive]
Direct(ContextId),
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum MatchOperation {
Push(Vec<ContextReference>),
Set(Vec<ContextReference>),
Pop,
None,
}
impl<'a> Iterator for MatchIter<'a> {
type Item = (&'a Context, usize);
fn next(&mut self) -> Option<(&'a Context, usize)> {
loop {
if self.ctx_stack.is_empty() {
return None;
}
// uncomment for debugging infinite recursion
// println!("{:?}", self.index_stack);
// use std::thread::sleep_ms;
// sleep_ms(500);
let last_index = self.ctx_stack.len() - 1;
let context = self.ctx_stack[last_index];
let index = self.index_stack[last_index];
self.index_stack[last_index] = index + 1;
if index < context.patterns.len() {
match context.patterns[index] {
Pattern::Match(_) => {
return Some((context, index));
},
Pattern::Include(ref ctx_ref) => {
let ctx_ptr = match *ctx_ref {
ContextReference::Direct(ref context_id) => {
self.syntax_set.get_context(context_id)
}
_ => return self.next(), // skip this and move onto the next one
};
self.ctx_stack.push(ctx_ptr);
self.index_stack.push(0);
}
}
} else {
self.ctx_stack.pop();
self.index_stack.pop();
}
}
}
}
/// Returns an iterator over all the match patterns in this context.
///
/// It recursively follows include directives. Can only be run on contexts that have already been
/// linked up.
pub fn context_iter<'a>(syntax_set: &'a SyntaxSet, context: &'a Context) -> MatchIter<'a> {
MatchIter {
syntax_set,
ctx_stack: vec![context],
index_stack: vec![0],
}
}
impl Context {
/// Returns the match pattern at an index, panics if the thing isn't a match pattern
pub fn match_at(&self, index: usize) -> &MatchPattern {
match self.patterns[index] {
Pattern::Match(ref match_pat) => match_pat,
_ => panic!("bad index to match_at"),
}
}
}
impl ContextReference {
/// find the pointed to context, panics if ref is not linked
pub fn resolve<'a>(&self, syntax_set: &'a SyntaxSet) -> &'a Context {
match *self {
ContextReference::Direct(ref context_id) => syntax_set.get_context(context_id),
_ => panic!("Can only call resolve on linked references: {:?}", self),
}
}
/// get the context ID this reference points to, panics if ref is not linked
pub fn id(&self) -> ContextId {
match *self {
ContextReference::Direct(ref context_id) => *context_id,
_ => panic!("Can only get ContextId of linked references: {:?}", self),
}
}
}
pub(crate) fn substitute_backrefs_in_regex<F>(regex_str: &str, substituter: F) -> String
where F: Fn(usize) -> Option<String>
{
let mut reg_str = String::with_capacity(regex_str.len());
let mut last_was_escape = false;
for c in regex_str.chars() {
if last_was_escape && c.is_digit(10) {
let val = c.to_digit(10).unwrap() as usize;<|fim▁hole|> reg_str.push_str(&sub);
}
} else if last_was_escape {
reg_str.push('\\');
reg_str.push(c);
} else if c != '\\' {
reg_str.push(c);
}
last_was_escape = c == '\\' && !last_was_escape;
}
reg_str
}
impl MatchPattern {
pub fn new(
has_captures: bool,
regex_str: String,
scope: Vec<Scope>,
captures: Option<CaptureMapping>,
operation: MatchOperation,
with_prototype: Option<ContextReference>,
) -> MatchPattern {
MatchPattern {
has_captures,
regex: Regex::new(regex_str),
scope,
captures,
operation,
with_prototype,
}
}
/// Used by the parser to compile a regex which needs to reference
/// regions from another matched pattern.
pub fn regex_with_refs(&self, region: &Region, text: &str) -> Regex {
let new_regex = substitute_backrefs_in_regex(self.regex.regex_str(), |i| {
region.pos(i).map(|(start, end)| escape(&text[start..end]))
});
Regex::new(new_regex)
}
pub fn regex(&self) -> &Regex {
&self.regex
}
}
/// Serialize the provided map in natural key order, so that it's deterministic when dumping.
pub(crate) fn ordered_map<K, V, S>(map: &HashMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer, K: Eq + Hash + Ord + Serialize, V: Serialize
{
let ordered: BTreeMap<_, _> = map.iter().collect();
ordered.serialize(serializer)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn can_compile_refs() {
let pat = MatchPattern {
has_captures: true,
regex: Regex::new(r"lol \\ \2 \1 '\9' \wz".into()),
scope: vec![],
captures: None,
operation: MatchOperation::None,
with_prototype: None,
};
let r = Regex::new(r"(\\\[\]\(\))(b)(c)(d)(e)".into());
let s = r"\[]()bcde";
let mut region = Region::new();
let matched = r.search(s, 0, s.len(), Some(&mut region));
assert!(matched);
let regex_with_refs = pat.regex_with_refs(®ion, s);
assert_eq!(regex_with_refs.regex_str(), r"lol \\ b \\\[\]\(\) '' \wz");
}
}<|fim▁end|> | if let Some(sub) = substituter(val) { |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
from miniURL.models import Redirection
#To build a form from a model. (/!\ different inheritance)
class RedirectionForm(forms.ModelForm):
class Meta:
model = Redirection
fields = ('real_url', 'pseudo')
# To retrieve data, this can be done with a POST<|fim▁hole|>
# or directly by passing a model object:
#form = ArticleForm(instance=article) # article is of course any Article object from the database
# The field is thus pre-filled.
# Once a valid ModelForm has been received, a simple save() stores it in the database.<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub struct Actor {
x: i32,
y: i32
}
impl Actor {
fn follow(self, dir: &Direction) -> Actor {
match *dir {
Direction::North => Actor { x: self.x, y: self.y + 1 },
Direction::East => Actor { x: self.x + 1, y: self.y },
Direction::South => Actor { x: self.x, y: self.y - 1 },
Direction::West => Actor { x: self.x - 1, y: self.y }
}
}<|fim▁hole|>}
#[derive(PartialEq, Debug)]
pub enum Direction {
North,
East,
South,
West,
}
impl Direction {
// (Left turn, Right turn)
fn turns(&self) -> (Direction, Direction) {
match *self {
Direction::North => (Direction::West, Direction::East),
Direction::East => (Direction::North, Direction::South),
Direction::South => (Direction::East, Direction::West),
Direction::West => (Direction::South, Direction::North)
}
}
}
pub struct Robot(Actor, Direction);
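// Illustrative usage sketch (editorial, not part of the original solution):
// starting at (0, 0) facing North, "RAALAL" ends at (2, 1) facing West.
//
// let robot = Robot::new(0, 0, Direction::North).instructions("RAALAL");
// assert_eq!(robot.position(), (2, 1));
// assert_eq!(*robot.direction(), Direction::West);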
impl Robot {
pub fn new(x: isize, y: isize, d: Direction) -> Self {
Robot(
Actor {
x : x as i32,
y : y as i32 },
d
)
}
pub fn turn_right(self) -> Self {
Robot(self.0, self.1.turns().1)
}
pub fn turn_left(self) -> Self {
Robot(self.0, self.1.turns().0)
}
pub fn advance(self) -> Self {
Robot(self.0.follow(&self.1), self.1)
}
fn obey(self, code: &str) -> Self {
match code {
"R" => self.turn_right(),
"L" => self.turn_left(),
"A" => self.advance(),
_ => panic!("Invalid instruction code")
}
}
pub fn instructions(self, instructions: &str) -> Self {
match instructions.split_at(1) {
(c, "") => self.obey(c),
(c, rest) => self.obey(c).instructions(rest)
}
}
pub fn position(&self) -> (isize, isize) {
(self.0.x as isize, self.0.y as isize)
}
pub fn direction(&self) -> &Direction {
&self.1
}
}<|fim▁end|> | |
<|file_name|>attr-derive.rs<|end_file_name|><|fim▁begin|>// aux-build:derive-foo.rs
// pp-exact
// Testing that both the inner item and next outer item are
// preserved, and that the first outer item parsed in main is not
// accidentally carried over to each inner function
#[macro_use]
extern crate derive_foo;
#[derive(Foo)]
struct X;
#[derive(Foo)]
#[Bar]
struct Y;
#[derive(Foo)]<|fim▁hole|>struct WithRef {
x: X,
#[Bar]
y: Y,
}
#[derive(Foo)]
enum Enum {
#[Bar]
Asdf,
Qwerty,
}
fn main() { }<|fim▁end|> | |
<|file_name|>kEdge.js<|end_file_name|><|fim▁begin|>'use strict';
var ANONYMOUS_USER_ID = "55268521fb9a901e442172f8";
var mongoose = require('mongoose');
//var Promise = require("bluebird");
var dbService = require('@colabo-knalledge/b-storage-mongo');
<|fim▁hole|>
function resSendJsonProtected(res, data) {
// http://tobyho.com/2011/01/28/checking-types-in-javascript/
if (data !== null && typeof data === 'object') { // http://stackoverflow.com/questions/8511281/check-if-a-variable-is-an-object-in-javascript
res.set('Content-Type', 'application/json');
// JSON Vulnerability Protection
// http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx/
// https://docs.angularjs.org/api/ng/service/$http#description_security-considerations_cross-site-request-forgery-protection
res.send(")]}',\n" + JSON.stringify(data));
} else if (typeof data === 'string') { // http://stackoverflow.com/questions/4059147/check-if-a-variable-is-a-string
res.send(data);
} else {
res.send(data);
}
};
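// Illustrative sketch (editorial, not part of the original file): a non-Angular
// client would strip the ")]}',\n" prefix before parsing, e.g.:
// var payload = JSON.parse(rawBody.replace(/^\)\]\}',\n/, ''));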
var dbConnection = dbService.DBConnect();
var KEdgeModel = dbConnection.model('kEdge', global.db.kEdge.Schema);
//required for requests that return results populated with target or source nodes:
var KNodeModel = dbConnection.model('KNode', global.db.kNode.Schema);
// module.exports = KEdgeModel; //then we can use it by: var User = require('./app/models/KEdgeModel');
// curl -v -H "Content-Type: application/json" -X GET http://127.0.0.1:8888/kedges/one/5524344b498be1070ccca4f6
// curl -v -H "Content-Type: application/json" -X GET http://127.0.0.1:8888/kedges/one/5524344b498be1070ccca4f6
// curl -v -H "Content-Type: application/json" -X GET http://127.0.0.1:8888/kedges/one/5524344b498be1070ccca4f6
//curl -v -H "Content-Type: application/json" -X GET http://127.0.0.1:8888/kedges/between/551b4366fd64e5552ed19364/551bb2c68f6e4cfc35654f37
//curl -v -H "Content-Type: application/json" -X GET http://127.0.0.1:8888/kedges/in_map/552678e69ad190a642ad461c
exports.index = function(req, res) {
var id = req.params.searchParam;
var id2 = req.params.searchParam2;
var id3 = req.params.searchParam3;
var id4 = req.params.searchParam4;
var type = req.params.type;
var found = function(err, kEdges) {
//console.log("[modules/kEdge.js:index] in found; req.params.type: %s: ", req.params.type);
//console.log("kEdges:"+kEdges);
if (err) {
throw err;
var msg = JSON.stringify(err);
resSendJsonProtected(res, { data: kEdges, accessId: accessId, message: msg, success: false });
} else {
resSendJsonProtected(res, { data: kEdges, accessId: accessId, success: true });
}
}
console.log("[modules/kEdge.js:index] req.params.searchParam: %s. req.params.searchParam2: %s", req.params.searchParam, req.params.searchParam2);
if (mockup && mockup.db && mockup.db.data) {
var datas_json = [];
//TODO: change data here:
datas_json.push({ id: 1, name: "Sun" });
datas_json.push({ id: 2, name: "Earth" });
datas_json.push({ id: 3, name: "Pluto" });
datas_json.push({ id: 4, name: "Venera" });
resSendJsonProtected(res, { data: datas_json, accessId: accessId });
}
//TODO: remove (testing)
KEdgeModel.find(function(err, kEdges) {
//console.log("all data:\n length: %d.\n", kEdges.length);
//console.log(kEdges);
//resSendJsonProtected(res, {data: {, accessId : accessId, success: true});
});
switch (type) {
case 'one': //by edge id:
KEdgeModel.findById(req.params.searchParam, found);
break;
case 'between': //all edges between specific nodes:
KEdgeModel.find({ $and: [{ 'sourceId': req.params.searchParam }, { 'targetId': req.params.searchParam2 }] }, found);
break;
case 'connected': //all edges connected to knode.id
KEdgeModel.find({ $or: [{ 'sourceId': req.params.searchParam }, { 'targetId': req.params.searchParam }] }, found);
break;
case 'in_map': //all edges in specific map
KEdgeModel.find({ 'mapId': req.params.searchParam }, found);
break;
case 'for_map_type_user_w_target_nodes':
console.log("for_map_type_user_w_target_nodes: mapId: %s, type: %s", id, id2);
var queryObj = { 'mapId': id, 'type': id2};
if(id3 !== null && id3 !== undefined && id3 !== 'null') {
console.log('iAmId: ', id3);
queryObj['iAmId'] = id3;
}
else{
console.log('iAmId: is not set as a paremeter - so for all users');
}
KEdgeModel.find(queryObj).populate('targetId', '_id name dataContent.humanID').exec(found);
break;
default:
console.log("[modules/kEdge.js:index] unsuported req.params.type: %s", type);
resSendJsonProtected(res, { data: [], accessId: accessId, message: 'unsuported req type \'' + req.params.type + '\'', success: false });
}
}
// curl -v -H "Content-Type: application/json" -X POST -d '{"name":"Hello Edge", "iAmId":5, "type":"contains", "sourceId":"551b4366fd64e5552ed19364", "targetId": "551bb2c68f6e4cfc35654f37", "ideaId":0}' http://127.0.0.1:8888/kedges
// curl -v -H "Content-Type: application/json" -X POST -d '{"name":"Hello Edge 3", "iAmId":6, "type":"contains", "ideaId":0}' http://127.0.0.1:8888/kedges
exports.create = function(req, res) {
console.log("[modules/kEdge.js:create] req.body: %s", JSON.stringify(req.body));
var data = req.body;
if (!("iAmId" in data) || data.iAmId == null || data.iAmId == 0) data.iAmId = mongoose.Types.ObjectId(ANONYMOUS_USER_ID);
var kEdge = new KEdgeModel(data);
//TODO: Should we force existence of node ids?
if (data.sourceId) {
kEdge.sourceId = mongoose.Types.ObjectId(data.sourceId);
}
if (data.targetId) {
kEdge.targetId = mongoose.Types.ObjectId(data.targetId);
}
kEdge.save(function(err) {
if (err) throw err;
console.log("[modules/kEdge.js:create] data (id:%s) created data: %s", kEdge.id, JSON.stringify(kEdge));
resSendJsonProtected(res, { success: true, data: kEdge, accessId: accessId });
});
}
//curl -v -H "Content-Type: application/json" -X PUT -d '{"name": "Hello World E1"}' http://127.0.0.1:8888/kedges/one/551bb2c68f6e4cfc35654f37
//curl -v -H "Content-Type: application/json" -X PUT -d '{"mapId": "552678e69ad190a642ad461c", "sourceId": "55268521fb9a901e442172f9", "targetId": "5526855ac4f4db29446bd183"}' http://127.0.0.1:8888/kedges/one/552475525034f70c166bf89c
exports.update = function(req, res) {
//console.log("[modules/KEdge.js:update] req.body: %s", JSON.stringify(req.body));
var data = req.body;
var id = req.params.searchParam;
console.log("[modules/KEdge.js:update] id : %s", id);
console.log("[modules/KEdge.js:update] data, : %s", JSON.stringify(data));
delete data._id;
//TODO: check this: multi (boolean) whether multiple documents should be updated (false)
//TODO: fix: numberAffected returns 0 and raw returns undefined. The question is whether the callback parameters are correct.
// KEdgeModel.findByIdAndUpdate(id , data, { /* multi: true */ }, function (err, numberAffected, raw) {
// if (err) throw err;
// console.log('The number of updated documents was %d', numberAffected);
// console.log('The raw response from Mongo was ', raw);
// resSendJsonProtected(res, {success: true, data: data, accessId : accessId});
// });
data.updatedAt = new Date(); //TODO: workaround for hook "schema.pre('update',...)" not working
KEdgeModel.update({ _id: id }, data, function(err, raw) {
if (err) throw err;
console.log('The raw response from Mongo was ', raw);
data._id = id;
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
}
exports.destroy = function(req, res) {
var type = req.params.type;
var dataId = req.params.searchParam;
var dataId2 = req.params.searchParam2;
console.log("[modules/kEdge.js::destroy] dataId:%s, type:%s, req.body: %s", dataId, type, JSON.stringify(req.body));
switch (type) {
case 'one': //by edge id:
console.log("[modules/kEdge.js:destroy] deleting 'one' edge with id = %d", dataId);
KEdgeModel.findByIdAndRemove(dataId, function(err) {
if (err) throw err;
var data = { id: dataId };
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
case 'connected': //all edges connected to knode.id
console.log("[modules/kEdge.js:destroy] deleting 'connected' to %s", dataId);
KEdgeModel.remove({ $or: [{ 'sourceId': dataId }, { 'targetId': dataId }] }, function(err) {
if (err) {
console.log("[modules/kEdge.js:destroy] error:" + err);
throw err;
}
var data = { id: dataId };
console.log("[modules/kEdge.js:destroy] data:" + JSON.stringify(data));
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
case 'in-map': //all edges in the map
console.log("[modules/kEdge.js:destroy] deleting edges in map %s", dataId);
KEdgeModel.remove({ 'mapId': dataId }, function(err) {
if (err) {
console.log("[modules/kEdge.js:destroy] error:" + err);
throw err;
}
var data = { id: dataId };
console.log("[modules/kEdge.js:destroy] data:" + JSON.stringify(data));
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
// TODO: this currently delete all edges that belongs to the provided mapId
case 'by-modification-source': // by source (manual/computer) of modification
console.log("[modules/kEdge.js:destroy] deleting edges in map %s", dataId);
KEdgeModel.remove({ 'mapId': dataId }, function(err) {
if (err) {
console.log("[modules/kEdge.js:destroy] error:" + err);
throw err;
}
var data = { id: dataId };
console.log("[modules/kEdge.js:destroy] data:" + JSON.stringify(data));
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
case 'by-type-n-user': // by type and user
//TODO: we must also filter by `mapId` but so far we are sending only 2 parameters!
console.log("[modules/kEdge.js:destroy] deleting all edges of type %s by user %s", dataId, dataId2);
KEdgeModel.remove({ $and: [{ 'type': dataId }, { 'iAmId': dataId2 }] }, function(err) {
if (err) {
console.log("[modules/kEdge.js:destroy] error:" + err);
throw err;
}
var data = { id: dataId };
console.log("[modules/kEdge.js:destroy] data:" + JSON.stringify(data));
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
case 'edges-to-child': // by type and user
console.log("[modules/kEdge.js:destroy] deleting all edges with specific tagetId %s", dataId);
KEdgeModel.remove({ 'targetId': dataId }, function(err) {
if (err) {
console.log("[modules/kEdge.js:destroy] error:" + err);
throw err;
}
var data = { id: dataId };
console.log("[modules/kEdge.js:destroy] data:" + JSON.stringify(data));
resSendJsonProtected(res, { success: true, data: data, accessId: accessId });
});
break;
default:
console.log("[modules/kEdge.js:index] unsuported req.params.type: %s", type);
resSendJsonProtected(res, { data: [], accessId: accessId, message: 'unsuported req type \'' + type + '\'', success: false });
}
};<|fim▁end|> | var mockup = { fb: { authenticate: false }, db: { data: false } };
var accessId = 0; |
<|file_name|>test_auth_resources.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# Copyright (C) 2014-2017 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
<|fim▁hole|>from tests import factories as f
from tests.utils import disconnect_signals, reconnect_signals
import pytest
pytestmark = pytest.mark.django_db
def setup_module(module):
disconnect_signals()
def teardown_module(module):
reconnect_signals()
def test_auth_create(client):
url = reverse('auth-list')
user = f.UserFactory.create()
login_data = json.dumps({
"type": "normal",
"username": user.username,
"password": user.username,
})
result = client.post(url, login_data, content_type="application/json")
assert result.status_code == 200
def test_auth_action_register(client, settings):
settings.PUBLIC_REGISTER_ENABLED = True
url = reverse('auth-register')
register_data = json.dumps({
"type": "public",
"username": "test",
"password": "test",
"full_name": "test",
"email": "[email protected]",
})
result = client.post(url, register_data, content_type="application/json")
assert result.status_code == 201<|fim▁end|> | |
<|file_name|>vector_utils.py<|end_file_name|><|fim▁begin|>import numpy as np
from scipy.sparse import csr_matrix, coo_matrix
def save_sparse_csr(filename,array):
np.savez(filename, data=array.data, indices=array.indices,
indptr=array.indptr, shape=array.shape)
def load_sparse_csr(filename):
loader = np.load(filename)
return csr_matrix(( loader['data'], loader['indices'], loader['indptr']),
shape = loader['shape'])
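# Illustrative round-trip (editorial sketch; the file name is arbitrary):
# m = csr_matrix(np.eye(3))
# save_sparse_csr("m.npz", m)
# assert (load_sparse_csr("m.npz") != m).nnz == 0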
def save_arr_assparse(filename, array):
return save_sparse_csr(filename, csr_matrix(array))
def load_sparse_asarr(filename):
return load_sparse_csr(filename).toarray()
def to_onehot(Y, vector_size):
Y_vec = []
# Now y should be converted to one hot vector for each index value
for i in xrange(len(Y)):
Y_vec.append([])
for j in xrange(len(Y[0])):
y_vec = np.zeros(vector_size)
y_vec[Y[i][j]] = 1
Y_vec[-1].append(y_vec)
return np.array(Y_vec, dtype='int32')
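# Illustrative example (editorial): to_onehot([[1, 0]], 3) returns
# [[[0, 1, 0], [1, 0, 0]]] as an int32 array.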
def to_onehot_char(X, vector_size):
X_vec = np.zeros(X.shape + (vector_size,))
for i in xrange(X_vec.shape[0]):
for j in xrange(X_vec.shape[1]):
for k in xrange(X_vec.shape[2]):
try:
X_vec[i,j,k,X[i,j,k]] = 1
except:
print X_vec.shape, X.shape, (i, j, k), X[i,j,k]
raise Exception
return X_vec
def onehot_to_idxarr(Y):
return Y.argmax(axis=len(Y.shape) - 1)
def confusion_matrix(y_pred, y_true, labels=None):
# Send only filtered y values.
y_pred, y_true = np.array(y_pred).flatten().squeeze(), np.array(y_true).flatten().squeeze()
if labels is None:
labels = list(set(y_true).union(set(y_pred)))
n_labels = len(labels)
print "[%s] labels = %s" % (n_labels, labels)
CM = coo_matrix((np.ones_like(y_true, dtype="int"), (y_true, y_pred)), shape=(n_labels, n_labels)).todense()
return CM
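# Illustrative example (editorial): confusion_matrix([0, 1, 1], [0, 1, 0])
# gives [[1, 1], [0, 1]], with rows indexed by y_true and columns by y_pred.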
def get_prf_scores(cm):
scores = dict()
TP = np.diag(cm)
FP = np.squeeze(np.asarray((np.sum(cm, axis=1)))) - TP
FN = np.squeeze(np.asarray((np.sum(cm, axis=0)))) - TP
scores["TP"] = TP
scores["FP"] = FP
scores["FN"] = FN
precision = TP * 1. / (TP + FP)
recall = TP * 1. / (TP + FN)
f1_score = 2*precision*recall / (precision + recall)
macro_f1 = np.mean(f1_score)
scores["precision"] = precision
scores["recall"] = recall
scores["f1_score"] = f1_score
scores["macro_f1"] = macro_f1
micro_precision = np.sum(TP) * 1. / np.sum(TP + FP)
micro_recall = np.sum(TP) * 1. / np.sum(TP + FN)<|fim▁hole|> scores["micro_recall"] = micro_recall
scores["micro_f1"] = micro_f1
return scores
def get_eval_scores(y_pred, y_true, labels=None):
return get_prf_scores(confusion_matrix(y_pred, y_true, labels=labels))<|fim▁end|> | micro_f1 = 2*micro_precision*micro_recall / (micro_precision+micro_recall)
scores["micro_precision"] = micro_precision |
<|file_name|>atmelavr.py<|end_file_name|><|fim▁begin|># Copyright (C) Ivan Kravets <[email protected]>
# See LICENSE for details.
"""
Builder for Atmel AVR series of microcontrollers
"""
from os.path import join
from time import sleep
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
DefaultEnvironment, SConscript)
from platformio.util import get_serialports
def BeforeUpload(target, source, env): # pylint: disable=W0613,W0621
def _rpi_sysgpio(path, value):
with open(path, "w") as f:
f.write(str(value))
if "micronucleus" in env['UPLOADER']:
print "Please unplug/plug device ..."
upload_options = env.get("BOARD_OPTIONS", {}).get("upload", {})
if "usb" in env.subst("$UPLOAD_PROTOCOL"):
upload_options['require_upload_port'] = False
env.Replace(UPLOAD_SPEED=None)
if env.subst("$UPLOAD_SPEED"):
env.Append(UPLOADERFLAGS=[
"-b", "$UPLOAD_SPEED",
"-D"
])
if not upload_options.get("require_upload_port", False):
return
env.AutodetectUploadPort()
env.Append(UPLOADERFLAGS=["-P", "$UPLOAD_PORT"])
if env.subst("$BOARD") == "raspduino":
_rpi_sysgpio("/sys/class/gpio/export", 18)
_rpi_sysgpio("/sys/class/gpio/gpio18/direction", "out")
_rpi_sysgpio("/sys/class/gpio/gpio18/value", 1)
sleep(0.1)
_rpi_sysgpio("/sys/class/gpio/gpio18/value", 0)
_rpi_sysgpio("/sys/class/gpio/unexport", 18)
else:
if not upload_options.get("disable_flushing", False):
env.FlushSerialBuffer("$UPLOAD_PORT")
before_ports = [i['port'] for i in get_serialports()]
if upload_options.get("use_1200bps_touch", False):
env.TouchSerialPort("$UPLOAD_PORT", 1200)
if upload_options.get("wait_for_upload_port", False):
env.Replace(UPLOAD_PORT=env.WaitForNewSerialPort(before_ports))
env = DefaultEnvironment()
SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "baseavr.py")))
if "digispark" in env.get(<|fim▁hole|> "BOARD_OPTIONS", {}).get("build", {}).get("core", ""):
env.Replace(
UPLOADER=join("$PIOPACKAGES_DIR", "tool-micronucleus", "micronucleus"),
UPLOADERFLAGS=[
"-c", "$UPLOAD_PROTOCOL",
"--timeout", "60"
],
UPLOADHEXCMD='"$UPLOADER" $UPLOADERFLAGS $SOURCES'
)
else:
env.Replace(
UPLOADER=join("$PIOPACKAGES_DIR", "tool-avrdude", "avrdude"),
UPLOADERFLAGS=[
"-v",
"-p", "$BOARD_MCU",
"-C",
'"%s"' % join("$PIOPACKAGES_DIR", "tool-avrdude", "avrdude.conf"),
"-c", "$UPLOAD_PROTOCOL"
],
UPLOADHEXCMD='"$UPLOADER" $UPLOADERFLAGS -U flash:w:$SOURCES:i',
UPLOADEEPCMD='"$UPLOADER" $UPLOADERFLAGS -U eeprom:w:$SOURCES:i'
)
#
# Target: Build executable and linkable firmware
#
target_elf = env.BuildFirmware()
#
# Target: Extract EEPROM data (from EEMEM directive) to .eep file
#
target_eep = env.Alias("eep", env.ElfToEep(join("$BUILD_DIR", "firmware"),
target_elf))
#
# Target: Build the .hex file
#
if "uploadlazy" in COMMAND_LINE_TARGETS:
target_firm = join("$BUILD_DIR", "firmware.hex")
else:
target_firm = env.ElfToHex(join("$BUILD_DIR", "firmware"), target_elf)
#
# Target: Print binary size
#
target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Upload by default .hex file
#
upload = env.Alias(["upload", "uploadlazy"], target_firm,
[BeforeUpload, "$UPLOADHEXCMD"])
AlwaysBuild(upload)
#
# Target: Upload .eep file
#
uploadeep = env.Alias("uploadeep", target_eep, [
BeforeUpload, "$UPLOADEEPCMD"])
AlwaysBuild(uploadeep)
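#
# Typical invocations (illustrative; this SCons script is normally driven by
# the PlatformIO CLI rather than executed directly):
#   platformio run --target upload
#   platformio run --target uploadeep
#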
#
# Setup default targets
#
Default([target_firm, target_size])<|fim▁end|> | |
<|file_name|>devtools.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::{AutoMargins, CachedConsoleMessage, CachedConsoleMessageTypes};
use devtools_traits::{ComputedNodeLayout, ConsoleAPI, PageError};
use devtools_traits::{EvaluateJSReply, Modification, NodeInfo, TimelineMarker};
use devtools_traits::TimelineMarkerType;
use dom::bindings::codegen::Bindings::CSSStyleDeclarationBinding::CSSStyleDeclarationMethods;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::conversions::{ConversionResult, FromJSValConvertible, jsstring_to_str};
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::document::AnimationFrameCallback;
use dom::element::Element;
use dom::globalscope::GlobalScope;
use dom::node::{Node, window_from_node};
use dom::window::Window;
use ipc_channel::ipc::IpcSender;
use js::jsapi::{JSAutoCompartment, ObjectClassName};
use js::jsval::UndefinedValue;
use msg::constellation_msg::PipelineId;
use script_thread::Documents;
use std::ffi::CStr;
use std::str;
use style::properties::longhands::{margin_bottom, margin_left, margin_right, margin_top};
use uuid::Uuid;
#[allow(unsafe_code)]
pub fn handle_evaluate_js(global: &GlobalScope, eval: String, reply: IpcSender<EvaluateJSReply>) {
// global.get_cx() returns a valid `JSContext` pointer, so this is safe.
let result = unsafe {
let cx = global.get_cx();
let globalhandle = global.reflector().get_jsobject();
let _ac = JSAutoCompartment::new(cx, globalhandle.get());
rooted!(in(cx) let mut rval = UndefinedValue());
global.evaluate_js_on_global_with_result(&eval, rval.handle_mut());
if rval.is_undefined() {
EvaluateJSReply::VoidValue
} else if rval.is_boolean() {
EvaluateJSReply::BooleanValue(rval.to_boolean())
} else if rval.is_double() || rval.is_int32() {
EvaluateJSReply::NumberValue(
match FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
Ok(ConversionResult::Success(v)) => v,
_ => unreachable!(),
})
} else if rval.is_string() {
EvaluateJSReply::StringValue(String::from(jsstring_to_str(cx, rval.to_string())))
} else if rval.is_null() {
EvaluateJSReply::NullValue
} else {
assert!(rval.is_object());
rooted!(in(cx) let obj = rval.to_object());
let class_name = CStr::from_ptr(ObjectClassName(cx, obj.handle()));
let class_name = str::from_utf8(class_name.to_bytes()).unwrap();
EvaluateJSReply::ActorValue {
class: class_name.to_owned(),
uuid: Uuid::new_v4().to_string(),
}
}
};
reply.send(result).unwrap();
}
pub fn handle_get_root_node(documents: &Documents, pipeline: PipelineId, reply: IpcSender<Option<NodeInfo>>) {
let info = documents.find_document(pipeline)
.map(|document| document.upcast::<Node>().summarize());
reply.send(info).unwrap();
}
pub fn handle_get_document_element(documents: &Documents,
pipeline: PipelineId,
reply: IpcSender<Option<NodeInfo>>) {
let info = documents.find_document(pipeline)
.and_then(|document| document.GetDocumentElement())
.map(|element| element.upcast::<Node>().summarize());
reply.send(info).unwrap();
}
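/// Walk the document's DOM tree in preorder and return the node whose
/// unique id matches `node_id`, if the pipeline has a document at all.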
fn find_node_by_unique_id(documents: &Documents,
pipeline: PipelineId,
node_id: &str)
-> Option<DomRoot<Node>> {
documents.find_document(pipeline).and_then(|document|
document.upcast::<Node>().traverse_preorder().find(|candidate| candidate.unique_id() == node_id)
)
}
pub fn handle_get_children(documents: &Documents,
pipeline: PipelineId,
node_id: String,
reply: IpcSender<Option<Vec<NodeInfo>>>) {
match find_node_by_unique_id(documents, pipeline, &*node_id) {
None => return reply.send(None).unwrap(),
Some(parent) => {
let children = parent.children()
.map(|child| child.summarize())
.collect();
reply.send(Some(children)).unwrap();
}
};
}
pub fn handle_get_layout(documents: &Documents,
pipeline: PipelineId,
node_id: String,
reply: IpcSender<Option<ComputedNodeLayout>>) {
let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
None => return reply.send(None).unwrap(),
Some(found_node) => found_node
};
let elem = node.downcast::<Element>().expect("should be getting layout of element");
let rect = elem.GetBoundingClientRect();
let width = rect.Width() as f32;
let height = rect.Height() as f32;
    let window = window_from_node(&*node);
    let computed_style = window.GetComputedStyle(elem, None);
reply.send(Some(ComputedNodeLayout {
display: String::from(computed_style.Display()),
position: String::from(computed_style.Position()),
zIndex: String::from(computed_style.ZIndex()),
boxSizing: String::from(computed_style.BoxSizing()),
autoMargins: determine_auto_margins(&window, &*node),
marginTop: String::from(computed_style.MarginTop()),
marginRight: String::from(computed_style.MarginRight()),
marginBottom: String::from(computed_style.MarginBottom()),
marginLeft: String::from(computed_style.MarginLeft()),
borderTopWidth: String::from(computed_style.BorderTopWidth()),
borderRightWidth: String::from(computed_style.BorderRightWidth()),
borderBottomWidth: String::from(computed_style.BorderBottomWidth()),
borderLeftWidth: String::from(computed_style.BorderLeftWidth()),
paddingTop: String::from(computed_style.PaddingTop()),
paddingRight: String::from(computed_style.PaddingRight()),
paddingBottom: String::from(computed_style.PaddingBottom()),
paddingLeft: String::from(computed_style.PaddingLeft()),
width: width,
height: height,
})).unwrap();
}
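/// Query layout for the node's margin styles and report which of the four
/// margins computed to `auto`.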
fn determine_auto_margins(window: &Window, node: &Node) -> AutoMargins {
let margin = window.margin_style_query(node.to_trusted_node_address());
AutoMargins {
top: margin.top == margin_top::computed_value::T::Auto,
right: margin.right == margin_right::computed_value::T::Auto,
bottom: margin.bottom == margin_bottom::computed_value::T::Auto,
left: margin.left == margin_left::computed_value::T::Auto,
}
}
pub fn handle_get_cached_messages(_pipeline_id: PipelineId,
message_types: CachedConsoleMessageTypes,
reply: IpcSender<Vec<CachedConsoleMessage>>) {
// TODO: check the messageTypes against a global Cache for console messages and page exceptions
let mut messages = Vec::new();
if message_types.contains(CachedConsoleMessageTypes::PAGE_ERROR) {
// TODO: make script error reporter pass all reported errors
// to devtools and cache them for returning here.
let msg = PageError {
type_: "PageError".to_owned(),
errorMessage: "page error test".to_owned(),
sourceName: String::new(),
lineText: String::new(),
lineNumber: 0,
columnNumber: 0,
category: String::new(),
timeStamp: 0,
error: false,
warning: false,
exception: false,
strict: false,
private: false,
};
messages.push(CachedConsoleMessage::PageError(msg));
}
if message_types.contains(CachedConsoleMessageTypes::CONSOLE_API) {
// TODO: do for real
let msg = ConsoleAPI {
type_: "ConsoleAPI".to_owned(),
level: "error".to_owned(),
filename: "http://localhost/~mihai/mozilla/test.html".to_owned(),
lineNumber: 0,
functionName: String::new(),
timeStamp: 0,
private: false,
arguments: vec!["console error test".to_owned()],
};
messages.push(CachedConsoleMessage::ConsoleAPI(msg));
}
reply.send(messages).unwrap();
}
pub fn handle_modify_attribute(documents: &Documents,
pipeline: PipelineId,
node_id: String,
modifications: Vec<Modification>) {
let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
None => return warn!("node id {} for pipeline id {} is not found", &node_id, &pipeline),
Some(found_node) => found_node
};
    let elem = node.downcast::<Element>().expect("should be modifying attributes of an element");
for modification in modifications {
match modification.newValue {
Some(string) => {
let _ = elem.SetAttribute(DOMString::from(modification.attributeName),
DOMString::from(string));
},
None => elem.RemoveAttribute(DOMString::from(modification.attributeName)),
}
}
}
pub fn handle_wants_live_notifications(global: &GlobalScope, send_notifications: bool) {
global.set_devtools_wants_updates(send_notifications);
}
pub fn handle_set_timeline_markers(documents: &Documents,
pipeline: PipelineId,
marker_types: Vec<TimelineMarkerType>,
reply: IpcSender<Option<TimelineMarker>>) {
match documents.find_window(pipeline) {
None => reply.send(None).unwrap(),
Some(window) => window.set_devtools_timeline_markers(marker_types, reply),
}
}
pub fn handle_drop_timeline_markers(documents: &Documents,
pipeline: PipelineId,
marker_types: Vec<TimelineMarkerType>) {
if let Some(window) = documents.find_window(pipeline) {
window.drop_devtools_timeline_markers(marker_types);
}
}
pub fn handle_request_animation_frame(documents: &Documents,
id: PipelineId,
actor_name: String) {
if let Some(doc) = documents.find_document(id) {
doc.request_animation_frame(AnimationFrameCallback::DevtoolsFramerateTick { actor_name });
}
}
pub fn handle_reload(documents: &Documents,<|fim▁hole|> }
}<|fim▁end|> | id: PipelineId) {
if let Some(win) = documents.find_window(id) {
win.Location().reload_without_origin_check(); |
<|file_name|>ecdsa_signer_key_manager.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Key manager for ECDSA signing keys.
use generic_array::typenum::Unsigned;
use p256::elliptic_curve;
use tink_core::{utils::wrap_err, TinkError};
use tink_proto::{prost::Message, EllipticCurveType};
/// Maximal version of ECDSA keys.
pub const ECDSA_SIGNER_KEY_VERSION: u32 = 0;
/// Type URL of ECDSA keys that Tink supports.
pub const ECDSA_SIGNER_TYPE_URL: &str = "type.googleapis.com/google.crypto.tink.EcdsaPrivateKey";
/// An implementation of the [`tink_core::registry::KeyManager`] trait.
/// It generates new ECDSA private keys and produces new instances of
/// [`crate::subtle::EcdsaSigner`].
#[derive(Default)]
pub(crate) struct EcdsaSignerKeyManager {}
/// Prefix for uncompressed elliptic curve points.
pub const ECDSA_UNCOMPRESSED_POINT_PREFIX: u8 = 0x04;
impl tink_core::registry::KeyManager for EcdsaSignerKeyManager {
fn primitive(&self, serialized_key: &[u8]) -> Result<tink_core::Primitive, TinkError> {
if serialized_key.is_empty() {
return Err("EcdsaSignerKeyManager: invalid key".into());
}
let key = tink_proto::EcdsaPrivateKey::decode(serialized_key)
.map_err(|e| wrap_err("EcdsaSignerKeyManager: invalid key", e))?;
let params = validate_key(&key)?;
let (hash, curve, encoding) = crate::get_ecdsa_param_ids(¶ms);
match crate::subtle::EcdsaSigner::new(hash, curve, encoding, &key.key_value) {
Ok(p) => Ok(tink_core::Primitive::Signer(Box::new(p))),
Err(e) => Err(wrap_err("EcdsaSignerKeyManager: invalid key", e)),
}
}
fn new_key(&self, serialized_key_format: &[u8]) -> Result<Vec<u8>, TinkError> {
if serialized_key_format.is_empty() {
return Err("EcdsaSignerKeyManager: invalid key format".into());
}
let key_format = tink_proto::EcdsaKeyFormat::decode(serialized_key_format)
.map_err(|e| wrap_err("EcdsaSignerKeyManager: invalid key", e))?;
let (params, curve) = validate_key_format(&key_format)?;
// generate key
let mut csprng = signature::rand_core::OsRng {};
let (secret_key_data, pub_x_data, pub_y_data) = match curve {
EllipticCurveType::NistP256 => {
// Generate a new keypair.
let secret_key = p256::ecdsa::SigningKey::random(&mut csprng);
let public_key = p256::ecdsa::VerifyingKey::from(&secret_key);
let public_key_point = public_key.to_encoded_point(/* compress= */ false);
let public_key_data = public_key_point.as_bytes();
// Check that the public key data is in the expected uncompressed format:
// - 1 byte uncompressed prefix (0x04)
// - P bytes of X coordinate
// - P bytes of Y coordinate
// where P is the field element size.
let point_len = elliptic_curve::FieldSize::<p256::NistP256>::to_usize();
if public_key_data.len() != 2 * point_len + 1
|| public_key_data[0] != ECDSA_UNCOMPRESSED_POINT_PREFIX
{
return Err("EcdsaSignerKeyManager: unexpected public key data format".into());
}
(
secret_key.to_bytes().to_vec(),
public_key_data[1..point_len + 1].to_vec(),
public_key_data[point_len + 1..].to_vec(),
)
}
_ => {
return Err(format!("EcdsaSignerKeyManager: unsupported curve {:?}", curve).into())
}
};
let pub_key = tink_proto::EcdsaPublicKey {
version: ECDSA_SIGNER_KEY_VERSION,
params: Some(params),
x: pub_x_data,
y: pub_y_data,
};
let priv_key = tink_proto::EcdsaPrivateKey {
version: ECDSA_SIGNER_KEY_VERSION,
public_key: Some(pub_key),
key_value: secret_key_data,
};
let mut sk = Vec::new();
priv_key
.encode(&mut sk)
.map_err(|e| wrap_err("EcdsaSignerKeyManager: failed to encode new key", e))?;
Ok(sk)
}
fn type_url(&self) -> &'static str {
ECDSA_SIGNER_TYPE_URL
}
fn key_material_type(&self) -> tink_proto::key_data::KeyMaterialType {
tink_proto::key_data::KeyMaterialType::AsymmetricPrivate
}
fn supports_private_keys(&self) -> bool {
true
}
fn public_key_data(
&self,
serialized_priv_key: &[u8],
) -> Result<tink_proto::KeyData, TinkError> {
let priv_key = tink_proto::EcdsaPrivateKey::decode(serialized_priv_key)
.map_err(|e| wrap_err("EcdsaSignerKeyManager: invalid private key", e))?;
let mut serialized_pub_key = Vec::new();
priv_key
.public_key
.ok_or_else(|| TinkError::new("EcdsaSignerKeyManager: no public key"))?
.encode(&mut serialized_pub_key)
.map_err(|e| wrap_err("EcdsaSignerKeyManager: invalid public key", e))?;
Ok(tink_proto::KeyData {
type_url: crate::ECDSA_VERIFIER_TYPE_URL.to_string(),
value: serialized_pub_key,
key_material_type: tink_proto::key_data::KeyMaterialType::AsymmetricPublic as i32,
})
}
}
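// Illustrative flow (hypothetical sketch; in practice this manager is reached
// through the Tink registry, e.g. after calling tink_signature::init(),
// rather than being constructed directly):
//
//   use tink_core::registry::KeyManager;
//   let km = EcdsaSignerKeyManager::default();
//   let serialized_key = km.new_key(&serialized_key_format)?;
//   if let tink_core::Primitive::Signer(signer) = km.primitive(&serialized_key)? {
//       let signature = signer.sign(b"message")?;
//   }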
/// Validate the given [`EcdsaPrivateKey`](tink_proto::EcdsaPrivateKey) and return
/// the parameters.
fn validate_key(key: &tink_proto::EcdsaPrivateKey) -> Result<tink_proto::EcdsaParams, TinkError> {
tink_core::keyset::validate_key_version(key.version, ECDSA_SIGNER_KEY_VERSION)
.map_err(|e| wrap_err("EcdsaSignerKeyManager", e))?;
let pub_key = key
.public_key
.as_ref()
.ok_or_else(|| TinkError::new("EcdsaSignerKeyManager: no public key"))?;
let params = crate::validate_ecdsa_public_key(pub_key)
.map_err(|e| wrap_err("EcdsaSignerKeyManager", e))?;
let (hash, curve, encoding) = crate::get_ecdsa_param_ids(¶ms);
// Check the public key points are on the curve by creating a verifier.
crate::subtle::EcdsaVerifier::new(hash, curve, encoding, &pub_key.x, &pub_key.y)
.map_err(|e| wrap_err("EcdsaVerifierKeyManager: invalid key", e))?;
crate::subtle::validate_ecdsa_params(hash, curve, encoding)?;
Ok(params)
}
/// Validate the given [`EcdsaKeyFormat`](tink_proto::EcdsaKeyFormat) and return
/// the parameters.<|fim▁hole|> let params = key_format
.params
.as_ref()
.ok_or_else(|| TinkError::new("no public key parameters"))?;
let (hash, curve, encoding) = crate::get_ecdsa_param_ids(params);
crate::subtle::validate_ecdsa_params(hash, curve, encoding)?;
Ok((params.clone(), curve))
}<|fim▁end|> | fn validate_key_format(
key_format: &tink_proto::EcdsaKeyFormat,
) -> Result<(tink_proto::EcdsaParams, tink_proto::EllipticCurveType), TinkError> { |
<|file_name|>colorful.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.styles.colorful
~~~~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by CodeRay.
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ColorfulStyle(Style):
"""
A colorful style, inspired by CodeRay.
"""
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "#888",
Comment.Preproc: "#579",
Comment.Special: "bold #cc0000",
Keyword: "bold #080",
Keyword.Pseudo: "#038",
Keyword.Type: "#339",
Operator: "#333",
Operator.Word: "bold #000",
Name.Builtin: "#007020",
Name.Function: "bold #06B",
Name.Class: "bold #B06",
Name.Namespace: "bold #0e84b5",
Name.Exception: "bold #F00",
Name.Variable: "#963",
Name.Variable.Instance: "#33B",
Name.Variable.Class: "#369",
Name.Variable.Global: "bold #d70",
Name.Constant: "bold #036",
Name.Label: "bold #970",
Name.Entity: "bold #800",
Name.Attribute: "#00C",
Name.Tag: "#070",
Name.Decorator: "bold #555",
String: "bg:#fff0f0",
String.Char: "#04D bg:",
String.Doc: "#D42 bg:",
String.Interpol: "bg:#eee",
String.Escape: "bold #666",
String.Regex: "bg:#fff0ff #000",
String.Symbol: "#A60 bg:",
String.Other: "#D20",
Number: "bold #60E",
Number.Integer: "bold #00D",
Number.Float: "bold #60E",
Number.Hex: "bold #058",
Number.Oct: "bold #40E",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",<|fim▁hole|> Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #c65d09",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "#F00 bg:#FAA"
}<|fim▁end|> | Generic.Inserted: "#00A000", |
<|file_name|>exitHandlers.js<|end_file_name|><|fim▁begin|>/*
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.<|fim▁hole|>var ExitHandlers = {
    // A widget can dynamically set the "do-not-exit" or "do-not-exit-once" classes on the field to indicate that we
    // should not exit the field; "do-not-exit-once" is cleared after a single exit attempt.
'manual-exit': {
handleExit: function(fieldModel) {
var doNotExit = fieldModel.element.hasClass('do-not-exit') || fieldModel.element.hasClass('do-not-exit-once');
fieldModel.element.removeClass('do-not-exit-once');
return !doNotExit;
}
},
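    // Pads the field's value with leading zeros up to its maxlength attribute
    // (e.g. "7" becomes "07" when maxlength="2"); blank values are left alone.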
'leading-zeros': {
handleExit: function(fieldModel) {
var val = fieldModel.element.val();
if (val) { // if the field is blank, leave it alone
var maxLength = parseInt(fieldModel.element.attr('maxlength'));
if (maxLength > 0) {
while (val.length < maxLength) {
val = "0" + val;
}
fieldModel.element.val(val);
}
}
return true;
}
}
};<|fim▁end|> | */
|