BaristaCardView.js

/**
 * card_view = new BaristaCardView({el: $("target_selector"),
url:"",
title:"",
subtitle:"",
fg_color: "#1b9e77",
image:"",
span_class: "col-lg-12"});
*
 * A Backbone View that displays a card of information wrapped in a link.
 * The view is meant to be a top-level entry point to other pages.
* basic use:
card_view = new BaristaCardView();
* optional arguments:
* @param {string} url the link to navigate to if the card is clicked, defaults to ""
 * @param {string} title the title of the card, defaults to "Title"
 * @param {string} subtitle the subtitle of the card, defaults to "subtitle"
 * @param {string} image the link to an image to show as the card's main content, defaults to ""
* @param {string} fg_color the hex color code to use as the foreground color of the view, defaults to
* #1b9e77
* @param {string} span_class a bootstrap span class to size the width of the view, defaults to
* "col-lg-12"
*/
Barista.Views.BaristaCardView = Backbone.View.extend({
/**
* give the view a name to be used throughout the View's functions when it needs to know what its class
* name is
* @type {String}
*/
name: "BaristaCardView",
/**
* supply a base model for the view
     * Override this if you need to use it for dynamic content
     * @type {Backbone.Model}
*/
model: new Backbone.Model(),
/**
     * override the view's default initialize method in order to catch options and render a custom template
*/
initialize: function(){
// set up color options. default if not specified
this.fg_color = (this.options.fg_color !== undefined) ? this.options.fg_color : "#1b9e77";
        // set up the span size
        this.span_class = (this.options.span_class !== undefined) ? this.options.span_class : "col-lg-12";
// set up the url
this.url = (this.options.url !== undefined) ? this.options.url : "";
// set up the title
this.title = (this.options.title !== undefined) ? this.options.title : "Title";
// set up the subtitle
this.subtitle = (this.options.subtitle !== undefined) ? this.options.subtitle : "subtitle";
// set up the image
this.image = (this.options.image !== undefined) ? this.options.image : "";
// bind render to model changes
this.listenTo(this.model,'change', this.update);
// compile the default template for the view
this.compile_template();
},
/**
* use Handlebars to compile the template for the view
*/
compile_template: function(){
var self = this;
        this.div_string = 'barista_view' + new Date().getTime();
this.$el.append(BaristaTemplates.CMapCard({div_string: this.div_string,
span_class: this.span_class,
url: this.url,
title: this.title,
subtitle: this.subtitle,
image: this.image,
fg_color: this.fg_color}));
}
});
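// Illustrative usage sketch (hypothetical selector and model, not part of
// the original file), showing the dynamic-content case mentioned in the
// `model` doc above:
//
//     var card_view = new Barista.Views.BaristaCardView({
//         el: $("#card_target"),
//         model: new Backbone.Model({title: "GEO"}),
//         url: "/geo",
//         title: "GEO"
//     });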
Edges-test.cpp

//////////////////////////////////////////////////////////////////////////
// This file is part of openPSTD. //
// //
// openPSTD is free software: you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation, either version 3 of the License, or //
// (at your option) any later version. //
// //
// openPSTD is distributed in the hope that it will be useful, //
// but WITHOUT ANY WARRANTY; without even the implied warranty of //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //
// GNU General Public License for more details. //
//                                                                      //
// You should have received a copy of the GNU General Public License   //
// along with openPSTD. If not, see <http://www.gnu.org/licenses/>.    //
// //
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
//
// Date: 1-11-2015
//
//
// Authors: M. R. Fortuin
//
//
// Purpose: Test cases for the Edge class
//
//
//////////////////////////////////////////////////////////////////////////
#ifdef STAND_ALONE
# define BOOST_TEST_MODULE Main
#endif
#include <boost/test/unit_test.hpp>
#include <GUI/Edges.h>
using namespace OpenPSTD::GUI;
BOOST_AUTO_TEST_SUITE(GUI)
BOOST_AUTO_TEST_SUITE(GUI_Edges)
BOOST_AUTO_TEST_CASE(TestHorizontal)
{
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).IsHorizontal());
BOOST_CHECK(!Edge(QVector2D(10, 0), QVector2D(10, 10), 0, false).IsHorizontal());
}
BOOST_AUTO_TEST_CASE(TestVertical)
{
BOOST_CHECK(Edge(QVector2D(10, 0), QVector2D(10, 10), 0, false).IsVertical());
BOOST_CHECK(!Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).IsVertical());
}
BOOST_AUTO_TEST_CASE(TestGetStart)
{
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).GetStart() == QVector2D(0, 10));
}
BOOST_AUTO_TEST_CASE(TestGetEnd)
{
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).GetEnd() == QVector2D(10, 10));
}
BOOST_AUTO_TEST_CASE(TestOnSameLine)
{
BOOST_CHECK(Edge::OnSameLine(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false), Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false)));
BOOST_CHECK(!Edge::OnSameLine(Edge(QVector2D(0, 11), QVector2D(10, 11), 0, false), Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false)));
BOOST_CHECK(!Edge::OnSameLine(Edge(QVector2D(10, 0), QVector2D(10, 10), 0, false), Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false)));
BOOST_CHECK(Edge::OnSameLine(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false), Edge(QVector2D(10, 10), QVector2D(20, 10), 0, false)));
BOOST_CHECK(!Edge::OnSameLine(Edge(QVector2D(0, 0), QVector2D(10, 10), 0, false), Edge(QVector2D(10, 10), QVector2D(20, 10), 0, false)));
}
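// Substract removes the overlapping portion of another collinear edge and
// returns the remaining segment(s) as a vector; subtracting an identical
// edge leaves an empty vector (behaviour inferred from the assertions below).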
BOOST_AUTO_TEST_CASE(TestSubstract)
{
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).Substract(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false)).empty());
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).Substract(Edge(QVector2D(0, 10), QVector2D(5, 10), 0, false))[0].GetStart() == QVector2D(5, 10));
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).Substract(Edge(QVector2D(0, 10), QVector2D(5, 10), 0, false))[0].GetEnd() == QVector2D(10, 10));
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).Substract(Edge(QVector2D(5, 10), QVector2D(10, 10), 0, false))[0].GetStart() == QVector2D(0, 10));
BOOST_CHECK(Edge(QVector2D(0, 10), QVector2D(10, 10), 0, false).Substract(Edge(QVector2D(5, 10), QVector2D(10, 10), 0, false))[0].GetEnd() == QVector2D(5, 10));
}
BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_SUITE_END()
remove-system-libs.py

#!/usr/bin/env python
# @author: Martin Siggel <[email protected]>
#
# This script fixes the cmake exports file by
# removing explicit linking to system libraries
import sys, re
def remove_absolute_paths(line):
"""
Removes libraries from the line that are found under /usr
"""
if sys.platform == 'win32':
return line
elif sys.platform == 'darwin':
return re.sub('/Applications/[-_a-zA-Z0-9/.]+.framework[;]?', '', line)
else:
return re.sub('/usr/[-_a-zA-Z0-9/]+.so[;]?', '', line)
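# Illustrative effect on Linux (example library path, not from the original
# script): a value such as
#   foo;/usr/lib/libz.so;bar
# becomes
#   foo;bar
# because the matched path and its trailing semicolon are stripped.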
def fix_paths(filename):
with open(filename) as f:
lines = f.readlines()
# just select lines containing string IMPORTED_LINK_INTERFACE_LIBRARIES
for i, line in enumerate(lines):
if "IMPORTED_LINK_INTERFACE_LIBRARIES" in line or "INTERFACE_LINK_LIBRARIES" in line:
lines[i] = remove_absolute_paths(line)
    fout = open(filename,'w')
fout.write("".join(lines))
fout.close()
if __name__ == "__main__":
assert(len(sys.argv) == 2)
filename = sys.argv[1]
    fix_paths(filename)
TestTaskNotRestarted.java

package functionaltests.job;
import java.io.Serializable;
import org.ow2.proactive.scheduler.common.Scheduler;
import org.ow2.proactive.scheduler.common.job.JobId;
import org.ow2.proactive.scheduler.common.job.JobState;
import org.ow2.proactive.scheduler.common.job.TaskFlowJob;
import org.ow2.proactive.scheduler.common.task.JavaTask;
import org.ow2.proactive.scheduler.common.task.TaskResult;
import org.ow2.proactive.scheduler.common.task.TaskStatus;
import org.ow2.proactive.scheduler.common.task.executable.JavaExecutable;
import org.junit.Test;
import functionaltests.utils.SchedulerFunctionalTest;
import static org.junit.Assert.*;
/**
 * Provokes the scenario in which a task gets the 'NOT_RESTARTED' status:
 * - a task is submitted and starts execution
 * - the user requests a task restart with some delay
 * - the job is killed before the task is restarted
*
*/
public class TestTaskNotRestarted extends SchedulerFunctionalTest {
public static class TestJavaTask extends JavaExecutable {
@Override
public Serializable execute(TaskResult... results) throws Throwable {
Thread.sleep(Long.MAX_VALUE);
return "OK";
        }
    }

    @Test
    public void test() throws Exception {
Scheduler scheduler = schedulerHelper.getSchedulerInterface();
JobId jobId = scheduler.submit(createJob());
JobState jobState;
schedulerHelper.waitForEventTaskRunning(jobId, "task1");
jobState = scheduler.getJobState(jobId);
assertEquals(1, jobState.getTasks().size());
assertEquals(TaskStatus.RUNNING, jobState.getTasks().get(0).getStatus());
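        // Request the restart with an effectively infinite delay so that the
        // task sits in WAITING_ON_ERROR (it never actually restarts) until
        // the job is killed below.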
scheduler.restartTask(jobId, "task1", Integer.MAX_VALUE);
jobState = scheduler.getJobState(jobId);
assertEquals(1, jobState.getTasks().size());
assertEquals(TaskStatus.WAITING_ON_ERROR, jobState.getTasks().get(0).getStatus());
scheduler.killJob(jobId);
jobState = scheduler.getJobState(jobId);
assertEquals(1, jobState.getTasks().size());
assertEquals(TaskStatus.NOT_RESTARTED, jobState.getTasks().get(0).getStatus());
}
private TaskFlowJob createJob() throws Exception {
TaskFlowJob job = new TaskFlowJob();
job.setName(this.getClass().getSimpleName());
JavaTask javaTask = new JavaTask();
javaTask.setExecutableClassName(TestJavaTask.class.getName());
javaTask.setName("task1");
javaTask.setMaxNumberOfExecution(10);
job.addTask(javaTask);
return job;
}
}
OpPanel.tsx

import * as React from "react";
import { Actions } from "./types";
export default class OpPanel extends React.Component<{
actions: Actions;
step: number;
}> {
addCnt = () => this.props.actions.addCnt(this.props.step);
render() {
return (
<div
style={{
width: "10rem",
          marginTop: "1rem",
          padding: "1rem",
          border: "solid #000 1px"
        }}
      >
        <div>Add-Total-Count Panel</div>
<button style={{ marginTop: "1rem" }} onClick={this.addCnt}>
Add {this.props.step}
</button>
</div>
);
}
}
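// Illustrative usage (hypothetical handler; assumes `actions` satisfies the
// Actions interface imported from "./types"):
//
//     <OpPanel actions={{ addCnt: (n: number) => console.log("add", n) }} step={5} />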
structure_analysis.py

# Standard
import os
import sys
# Third Party
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
import pyfilm as pf
from skimage.measure import label
from skimage import filters
plt.rcParams.update({'figure.autolayout': True})
mpl.rcParams['axes.unicode_minus'] = False
# Local
from run import Run
import plot_style
plot_style.white()
pal = sns.color_palette('deep')
def structure_analysis(run, perc_thresh, create_film=False):
"""
Calculates the number of structures as a function of time for a given
percentile cut-off. Writes results and plots to an appropriate directory.
Parameters
----------
run : object
Run object calculated by the Run class.
perc_thresh : int
Percentile threshold at which to cut off fluctuations.
create_film : bool
Determines whether a film of the labelled structures is produced.
"""
run.read_ntot()
make_results_dir(run, perc_thresh)
labelled_image, nlabels = label_structures(run, perc_thresh)
no_structures = count_structures(run, labelled_image, nlabels)
plot_no_structures(run, no_structures, perc_thresh)
save_results(run, no_structures, perc_thresh)
if create_film:
make_film(run, no_structures, labelled_image, perc_thresh)
def make_results_dir(run, perc_thresh):
os.system('mkdir -p ' + run.run_dir + 'analysis/structures_' +
str(perc_thresh))
def label_structures(run, perc_thresh):
nlabels = np.empty(run.nt, dtype=int)
labelled_image = np.empty([run.nt, run.nx, run.ny], dtype=int)
for it in range(run.nt):
tmp = run.ntot_i[it,:,:].copy()
# Apply Gaussian filter
tmp = filters.gaussian(tmp, sigma=1)
thresh = np.percentile(tmp, perc_thresh,
interpolation='nearest')
tmp_max = np.max(tmp)
tmp_thresh = thresh/tmp_max
tmp /= tmp_max
tmp[tmp <= tmp_thresh] = 0
tmp[tmp > tmp_thresh] = 1
# Label the resulting structures
labelled_image[it,:,:], nlabels[it] = label(tmp, return_num=True,
background=0)
return(labelled_image, nlabels)
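# Minimal, self-contained sketch (not part of the original analysis) of the
# threshold-and-label core used in label_structures: values above a
# percentile cut-off are binarised and connected regions are labelled.
def _example_threshold_label():
    field = np.zeros((8, 8))
    field[1:3, 1:3] = 1.0   # first synthetic blob
    field[5:7, 5:7] = 1.0   # second synthetic blob
    thresh = np.percentile(field, 75, interpolation='nearest')
    labelled, nlabels = label(field > thresh, return_num=True, background=0)
    assert nlabels == 2
    return labelled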
def count_structures(run, labelled_image, nlabels):
"""
Remove any structures which are too small and count structures.
"""<|fim▁hole|> hist = np.histogram(np.ravel(labelled_image[it]),
bins=range(1,nlabels[it]+1))[0]
smallest_struc = np.mean(hist)*0.1
hist = hist[hist > smallest_struc]
nblobs[it] = len(hist)
return(nblobs)
def plot_no_structures(run, no_structures, perc_thresh):
"""
Plot number of structures as a function of time.
"""
plt.clf()
plt.plot(no_structures)
plt.xlabel('Time index')
plt.ylabel('Number of structures')
plt.ylim(0)
plt.savefig(run.run_dir + 'analysis/structures_' + str(perc_thresh) +
'/nblobs.pdf')
def save_results(run, no_structures, perc_thresh):
"""
Save the number of structures as a function of time in a file.
"""
np.savetxt(run.run_dir + 'analysis/structures_' + str(perc_thresh) +
'/nblobs.csv', np.transpose((range(run.nt), no_structures)),
delimiter=',', fmt='%d', header='t_index,nblobs')
def make_film(run, no_structures, labelled_image, perc_thresh):
titles = []
for it in range(run.nt):
titles.append('No. of structures = {}'.format(no_structures[it]))
plot_options = {'cmap':'gist_rainbow',
'levels':np.arange(-1,np.max(labelled_image))
}
options = {'file_name':'structures',
'film_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) ,
'frame_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) + '/film_frames',
'nprocs':None,
'aspect':'equal',
'xlabel':r'$x$ (m)',
'ylabel':r'$y$ (m)',
'cbar_ticks':np.arange(-1,np.max(labelled_image),2),
'cbar_label':r'Label',
'fps':10,
'bbox_inches':'tight',
'title':titles
}
pf.make_film_2d(run.r, run.z, labelled_image,
plot_options=plot_options, options=options)
if __name__ == '__main__':
run = Run(sys.argv[1])
structure_analysis(run, 75, create_film=False)
    structure_analysis(run, 95, create_film=False)
prefs.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use resource_files::resources_dir_path;
use rustc_serialize::json::{Json, ToJson};
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::fs::File;
use std::sync::{Arc, Mutex};
lazy_static! {
static ref PREFS: Arc<Mutex<HashMap<String, Pref>>> = {
let prefs = read_prefs().unwrap_or(HashMap::new());
        Arc::new(Mutex::new(prefs))
    };
}
#[derive(PartialEq, Clone, Debug)]
pub enum PrefValue {
Boolean(bool),
String(String),
Missing
}
impl PrefValue {
pub fn from_json(data: Json) -> Result<PrefValue, ()> {
let value = match data {
Json::Boolean(x) => PrefValue::Boolean(x),
Json::String(x) => PrefValue::String(x),
_ => return Err(())
};
Ok(value)
}
pub fn as_boolean(&self) -> Option<bool> {
match *self {
PrefValue::Boolean(value) => {
Some(value)
},
_ => None
}
}
pub fn as_string(&self) -> Option<&str> {
match *self {
PrefValue::String(ref value) => {
Some(&value)
},
_ => None
}
}
}
impl ToJson for PrefValue {
fn to_json(&self) -> Json {
match *self {
PrefValue::Boolean(x) => {
Json::Boolean(x)
},
PrefValue::String(ref x) => {
Json::String(x.clone())
}
PrefValue::Missing => Json::Null
}
}
}
enum Pref {
NoDefault(Arc<PrefValue>),
WithDefault(Arc<PrefValue>, Option<Arc<PrefValue>>)
}
impl Pref {
pub fn new(value: PrefValue) -> Pref {
Pref::NoDefault(Arc::new(value))
}
fn new_default(value: PrefValue) -> Pref {
Pref::WithDefault(Arc::new(value), None)
}
fn from_json(data: Json) -> Result<Pref, ()> {
let value = try!(PrefValue::from_json(data));
Ok(Pref::new_default(value))
}
pub fn value(&self) -> &Arc<PrefValue> {
match *self {
Pref::NoDefault(ref x) => x,
Pref::WithDefault(ref default, ref override_value) => {
match *override_value {
Some(ref x) => x,
None => default
}
}
}
}
fn set(&mut self, value: PrefValue) {
// TODO - this should error if we try to override a pref of one type
// with a value of a different type
match *self {
Pref::NoDefault(ref mut pref_value) => {
*pref_value = Arc::new(value)
},
Pref::WithDefault(_, ref mut override_value) => {
*override_value = Some(Arc::new(value))
}
}
}
}
impl ToJson for Pref {
fn to_json(&self) -> Json {
self.value().to_json()
}
}
fn read_prefs() -> Result<HashMap<String, Pref>, ()> {
let mut path = resources_dir_path();
path.push("prefs.json");
let mut file = try!(File::open(path).or_else(|e| {
println!("Error opening preferences: {:?}.", e);
Err(())
}));
let json = try!(Json::from_reader(&mut file).or_else(|e| {
println!("Ignoring invalid JSON in preferences: {:?}.", e);
Err(())
}));
let mut prefs = HashMap::new();
if let Json::Object(obj) = json {
for (name, value) in obj.into_iter() {
match Pref::from_json(value) {
Ok(x) => {
prefs.insert(name, x);
},
Err(_) => println!("Ignoring non-boolean/string preference value for {:?}", name)
}
}
}
Ok(prefs)
}
pub fn get_pref(name: &str) -> Arc<PrefValue> {
PREFS.lock().unwrap().get(name).map_or(Arc::new(PrefValue::Missing), |x| x.value().clone())
}
pub fn set_pref(name: &str, value: PrefValue) {
let mut prefs = PREFS.lock().unwrap();
if let Some(pref) = prefs.get_mut(name) {
pref.set(value);
return;
}
prefs.insert(name.to_owned(), Pref::new(value));
}
pub fn reset_pref(name: &str) -> Arc<PrefValue> {
let mut prefs = PREFS.lock().unwrap();
let result = match prefs.get_mut(name) {
None => return Arc::new(PrefValue::Missing),
Some(&mut Pref::NoDefault(_)) => Arc::new(PrefValue::Missing),
Some(&mut Pref::WithDefault(ref default, ref mut set_value)) => {
*set_value = None;
default.clone()
},
};
if *result == PrefValue::Missing {
prefs.remove(name);
}
result
}
pub fn reset_all_prefs() {
let names = {
PREFS.lock().unwrap().keys().cloned().collect::<Vec<String>>()
};
for name in names.iter() {
reset_pref(name);
}
}
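// Illustrative usage (hypothetical pref name, not part of the original
// module):
//
//     set_pref("shell.homepage", PrefValue::String("about:blank".to_owned()));
//     assert_eq!(get_pref("shell.homepage").as_string(), Some("about:blank"));
//     reset_pref("shell.homepage");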
mod.rs

// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Numeric traits and functions for generic mathematics
//!
//! These are implemented for the primitive numeric types in `std::{u8, u16,
//! u32, u64, usize, i8, i16, i32, i64, isize, f32, f64}`.
#![stable(feature = "rust1", since = "1.0.0")]
#![allow(missing_docs)]
#[cfg(test)] use fmt::Debug;
use ops::{Add, Sub, Mul, Div, Rem, Neg};
use marker::Copy;
use clone::Clone;
use cmp::{PartialOrd, PartialEq};
pub use core::num::{Int, SignedInt, UnsignedInt};
pub use core::num::{cast, FromPrimitive, NumCast, ToPrimitive};
pub use core::num::{from_int, from_i8, from_i16, from_i32, from_i64};
pub use core::num::{from_uint, from_u8, from_u16, from_u32, from_u64};
pub use core::num::{from_f32, from_f64};
pub use core::num::{FromStrRadix, from_str_radix};
pub use core::num::{FpCategory, ParseIntError, ParseFloatError};
pub use core::num::wrapping;
use option::Option;
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub mod strconv;
/// Mathematical operations on primitive floating point numbers.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Float
: Copy + Clone
+ NumCast
+ PartialOrd
+ PartialEq
+ Neg<Output=Self>
+ Add<Output=Self>
+ Sub<Output=Self>
+ Mul<Output=Self>
+ Div<Output=Self>
+ Rem<Output=Self>
{
// inlined methods from `num::Float`
/// Returns the `NaN` value.
///
/// ```
/// use std::num::Float;
///
/// let nan: f32 = Float::nan();
///
/// assert!(nan.is_nan());
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn nan() -> Self;
/// Returns the infinite value.
///
/// ```
/// use std::num::Float;
/// use std::f32;
///
/// let infinity: f32 = Float::infinity();
///
/// assert!(infinity.is_infinite());
/// assert!(!infinity.is_finite());
/// assert!(infinity > f32::MAX);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn infinity() -> Self;
/// Returns the negative infinite value.
///
/// ```
/// use std::num::Float;
/// use std::f32;
///
/// let neg_infinity: f32 = Float::neg_infinity();
///
/// assert!(neg_infinity.is_infinite());
/// assert!(!neg_infinity.is_finite());
/// assert!(neg_infinity < f32::MIN);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn neg_infinity() -> Self;
/// Returns `0.0`.
///
/// ```
/// use std::num::Float;
///
/// let inf: f32 = Float::infinity();
/// let zero: f32 = Float::zero();
/// let neg_zero: f32 = Float::neg_zero();
///
/// assert_eq!(zero, neg_zero);
/// assert_eq!(7.0f32/inf, zero);
/// assert_eq!(zero * 10.0, zero);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn zero() -> Self;
/// Returns `-0.0`.
///
/// ```
/// use std::num::Float;
///
/// let inf: f32 = Float::infinity();
/// let zero: f32 = Float::zero();
/// let neg_zero: f32 = Float::neg_zero();
///
/// assert_eq!(zero, neg_zero);
/// assert_eq!(7.0f32/inf, zero);
/// assert_eq!(zero * 10.0, zero);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn neg_zero() -> Self;
/// Returns `1.0`.
///
/// ```
/// use std::num::Float;
///
/// let one: f32 = Float::one();
///
/// assert_eq!(one, 1.0f32);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn one() -> Self;
// FIXME (#5527): These should be associated constants
/// Deprecated: use `std::f32::MANTISSA_DIGITS` or `std::f64::MANTISSA_DIGITS`
/// instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::MANTISSA_DIGITS` or \
`std::f64::MANTISSA_DIGITS` as appropriate")]
fn mantissa_digits(unused_self: Option<Self>) -> usize;
/// Deprecated: use `std::f32::DIGITS` or `std::f64::DIGITS` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::DIGITS` or `std::f64::DIGITS` as appropriate")]
fn digits(unused_self: Option<Self>) -> usize;
/// Deprecated: use `std::f32::EPSILON` or `std::f64::EPSILON` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::EPSILON` or `std::f64::EPSILON` as appropriate")]
fn epsilon() -> Self;
/// Deprecated: use `std::f32::MIN_EXP` or `std::f64::MIN_EXP` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::MIN_EXP` or `std::f64::MIN_EXP` as appropriate")]
fn min_exp(unused_self: Option<Self>) -> isize;
/// Deprecated: use `std::f32::MAX_EXP` or `std::f64::MAX_EXP` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::MAX_EXP` or `std::f64::MAX_EXP` as appropriate")]
fn max_exp(unused_self: Option<Self>) -> isize;
/// Deprecated: use `std::f32::MIN_10_EXP` or `std::f64::MIN_10_EXP` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::MIN_10_EXP` or `std::f64::MIN_10_EXP` as appropriate")]
fn min_10_exp(unused_self: Option<Self>) -> isize;
/// Deprecated: use `std::f32::MAX_10_EXP` or `std::f64::MAX_10_EXP` instead.
#[unstable(feature = "std_misc")]
#[deprecated(since = "1.0.0",
reason = "use `std::f32::MAX_10_EXP` or `std::f64::MAX_10_EXP` as appropriate")]
fn max_10_exp(unused_self: Option<Self>) -> isize;
/// Returns the smallest finite value that this type can represent.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x: f64 = Float::min_value();
///
/// assert_eq!(x, f64::MIN);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn min_value() -> Self;
/// Returns the smallest normalized positive number that this type can represent.
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn min_pos_value(unused_self: Option<Self>) -> Self;
/// Returns the largest finite value that this type can represent.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x: f64 = Float::max_value();
/// assert_eq!(x, f64::MAX);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn max_value() -> Self;
/// Returns `true` if this value is `NaN` and false otherwise.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let nan = f64::NAN;
/// let f = 7.0;
///
/// assert!(nan.is_nan());
/// assert!(!f.is_nan());
/// ```
#[unstable(feature = "std_misc", reason = "position is undecided")]
fn is_nan(self) -> bool;
/// Returns `true` if this value is positive infinity or negative infinity and
/// false otherwise.
///
/// ```
/// use std::num::Float;
/// use std::f32;
///
/// let f = 7.0f32;
/// let inf: f32 = Float::infinity();
/// let neg_inf: f32 = Float::neg_infinity();
/// let nan: f32 = f32::NAN;
///
/// assert!(!f.is_infinite());
/// assert!(!nan.is_infinite());
///
/// assert!(inf.is_infinite());
/// assert!(neg_inf.is_infinite());
/// ```
#[unstable(feature = "std_misc", reason = "position is undecided")]
fn is_infinite(self) -> bool;
/// Returns `true` if this number is neither infinite nor `NaN`.
///
/// ```
/// use std::num::Float;
/// use std::f32;
///
/// let f = 7.0f32;
/// let inf: f32 = Float::infinity();
/// let neg_inf: f32 = Float::neg_infinity();
/// let nan: f32 = f32::NAN;
///
/// assert!(f.is_finite());
///
/// assert!(!nan.is_finite());
/// assert!(!inf.is_finite());
/// assert!(!neg_inf.is_finite());
/// ```
#[unstable(feature = "std_misc", reason = "position is undecided")]
fn is_finite(self) -> bool;
/// Returns `true` if the number is neither zero, infinite,
/// [subnormal][subnormal], or `NaN`.
///
/// ```
/// use std::num::Float;
/// use std::f32;
///
/// let min = f32::MIN_POSITIVE; // 1.17549435e-38f32
/// let max = f32::MAX;
/// let lower_than_min = 1.0e-40_f32;
/// let zero = 0.0f32;
///
/// assert!(min.is_normal());
/// assert!(max.is_normal());
///
/// assert!(!zero.is_normal());
/// assert!(!f32::NAN.is_normal());
/// assert!(!f32::INFINITY.is_normal());
/// // Values between `0` and `min` are Subnormal.
/// assert!(!lower_than_min.is_normal());
/// ```
/// [subnormal]: http://en.wikipedia.org/wiki/Denormal_number
#[unstable(feature = "std_misc", reason = "position is undecided")]
fn is_normal(self) -> bool;
/// Returns the floating point category of the number. If only one property
/// is going to be tested, it is generally faster to use the specific
/// predicate instead.
///
/// ```
/// use std::num::{Float, FpCategory};
/// use std::f32;
///
/// let num = 12.4f32;
/// let inf = f32::INFINITY;
///
/// assert_eq!(num.classify(), FpCategory::Normal);
/// assert_eq!(inf.classify(), FpCategory::Infinite);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn classify(self) -> FpCategory;
/// Returns the mantissa, base 2 exponent, and sign as integers, respectively.
/// The original number can be recovered by `sign * mantissa * 2 ^ exponent`.
/// The floating point encoding is documented in the [Reference][floating-point].
///
/// ```
/// use std::num::Float;
///
    /// let num = 2.0f32;
    ///
    /// // (8388608, -22, 1)
    /// let (mantissa, exponent, sign) = num.integer_decode();
/// let sign_f = sign as f32;
/// let mantissa_f = mantissa as f32;
/// let exponent_f = num.powf(exponent as f32);
///
/// // 1 * 8388608 * 2^(-22) == 2
/// let abs_difference = (sign_f * mantissa_f * exponent_f - num).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
/// [floating-point]: ../../../../../reference.html#machine-types
#[unstable(feature = "std_misc", reason = "signature is undecided")]
fn integer_decode(self) -> (u64, i16, i8);
/// Returns the largest integer less than or equal to a number.
///
/// ```
/// use std::num::Float;
///
/// let f = 3.99;
/// let g = 3.0;
///
/// assert_eq!(f.floor(), 3.0);
/// assert_eq!(g.floor(), 3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn floor(self) -> Self;
/// Returns the smallest integer greater than or equal to a number.
///
/// ```
/// use std::num::Float;
///
/// let f = 3.01;
/// let g = 4.0;
///
/// assert_eq!(f.ceil(), 4.0);
/// assert_eq!(g.ceil(), 4.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn ceil(self) -> Self;
/// Returns the nearest integer to a number. Round half-way cases away from
/// `0.0`.
///
/// ```
/// use std::num::Float;
///
/// let f = 3.3;
/// let g = -3.3;
///
/// assert_eq!(f.round(), 3.0);
/// assert_eq!(g.round(), -3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn round(self) -> Self;
/// Return the integer part of a number.
///
/// ```
/// use std::num::Float;
///
/// let f = 3.3;
/// let g = -3.7;
///
/// assert_eq!(f.trunc(), 3.0);
/// assert_eq!(g.trunc(), -3.0);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn trunc(self) -> Self;
/// Returns the fractional part of a number.
///
/// ```
/// use std::num::Float;
///
/// let x = 3.5;
/// let y = -3.5;
/// let abs_difference_x = (x.fract() - 0.5).abs();
/// let abs_difference_y = (y.fract() - (-0.5)).abs();
///
/// assert!(abs_difference_x < 1e-10);
/// assert!(abs_difference_y < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fract(self) -> Self;
/// Computes the absolute value of `self`. Returns `Float::nan()` if the
/// number is `Float::nan()`.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = 3.5;
/// let y = -3.5;
///
/// let abs_difference_x = (x.abs() - x).abs();
/// let abs_difference_y = (y.abs() - (-y)).abs();
///
/// assert!(abs_difference_x < 1e-10);
/// assert!(abs_difference_y < 1e-10);
///
/// assert!(f64::NAN.abs().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn abs(self) -> Self;
/// Returns a number that represents the sign of `self`.
///
/// - `1.0` if the number is positive, `+0.0` or `Float::infinity()`
/// - `-1.0` if the number is negative, `-0.0` or `Float::neg_infinity()`
/// - `Float::nan()` if the number is `Float::nan()`
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let f = 3.5;
///
/// assert_eq!(f.signum(), 1.0);
/// assert_eq!(f64::NEG_INFINITY.signum(), -1.0);
///
/// assert!(f64::NAN.signum().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn signum(self) -> Self;
/// Returns `true` if `self` is positive, including `+0.0` and
/// `Float::infinity()`.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let nan: f64 = f64::NAN;
///
/// let f = 7.0;
/// let g = -7.0;
///
/// assert!(f.is_positive());
/// assert!(!g.is_positive());
/// // Requires both tests to determine if is `NaN`
/// assert!(!nan.is_positive() && !nan.is_negative());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn is_positive(self) -> bool;
/// Returns `true` if `self` is negative, including `-0.0` and
/// `Float::neg_infinity()`.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let nan = f64::NAN;
///
/// let f = 7.0;
/// let g = -7.0;
///
/// assert!(!f.is_negative());
/// assert!(g.is_negative());
/// // Requires both tests to determine if is `NaN`.
/// assert!(!nan.is_positive() && !nan.is_negative());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn is_negative(self) -> bool;
/// Fused multiply-add. Computes `(self * a) + b` with only one rounding
/// error. This produces a more accurate result with better performance than
/// a separate multiplication operation followed by an add.
///
/// ```
/// use std::num::Float;
///
/// let m = 10.0;
/// let x = 4.0;
/// let b = 60.0;
///
/// // 100.0
/// let abs_difference = (m.mul_add(x, b) - (m*x + b)).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn mul_add(self, a: Self, b: Self) -> Self;
/// Take the reciprocal (inverse) of a number, `1/x`.
///
/// ```
/// use std::num::Float;
///
/// let x = 2.0;
/// let abs_difference = (x.recip() - (1.0/x)).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn recip(self) -> Self;
/// Raise a number to an integer power.
///
/// Using this function is generally faster than using `powf`
///
/// ```
/// use std::num::Float;
///
/// let x = 2.0;
/// let abs_difference = (x.powi(2) - x*x).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn powi(self, n: i32) -> Self;
/// Raise a number to a floating point power.
///
/// ```
/// use std::num::Float;
///
/// let x = 2.0;
/// let abs_difference = (x.powf(2.0) - x*x).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn powf(self, n: Self) -> Self;
/// Take the square root of a number.
///
/// Returns NaN if `self` is a negative number.
///
/// ```
/// use std::num::Float;
///
/// let positive = 4.0;
/// let negative = -4.0;
///
/// let abs_difference = (positive.sqrt() - 2.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// assert!(negative.sqrt().is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn sqrt(self) -> Self;
/// Take the reciprocal (inverse) square root of a number, `1/sqrt(x)`.
///
/// ```
/// use std::num::Float;
///
/// let f = 4.0;
///
/// let abs_difference = (f.rsqrt() - 0.5).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn rsqrt(self) -> Self;
/// Returns `e^(self)`, (the exponential function).
///
/// ```
/// use std::num::Float;
///
/// let one = 1.0;
/// // e^1
/// let e = one.exp();
///
/// // ln(e) - 1 == 0
/// let abs_difference = (e.ln() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn exp(self) -> Self;
/// Returns `2^(self)`.
///
/// ```
/// use std::num::Float;
///
/// let f = 2.0;
///
/// // 2^2 - 4 == 0
/// let abs_difference = (f.exp2() - 4.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn exp2(self) -> Self;
/// Returns the natural logarithm of the number.
///
/// ```
/// use std::num::Float;
///
/// let one = 1.0;
/// // e^1
/// let e = one.exp();
///
/// // ln(e) - 1 == 0
/// let abs_difference = (e.ln() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn ln(self) -> Self;
/// Returns the logarithm of the number with respect to an arbitrary base.
///
/// ```
/// use std::num::Float;
///
/// let ten = 10.0;
/// let two = 2.0;
///
/// // log10(10) - 1 == 0
/// let abs_difference_10 = (ten.log(10.0) - 1.0).abs();
///
/// // log2(2) - 1 == 0
/// let abs_difference_2 = (two.log(2.0) - 1.0).abs();
///
/// assert!(abs_difference_10 < 1e-10);
/// assert!(abs_difference_2 < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn log(self, base: Self) -> Self;
/// Returns the base 2 logarithm of the number.
///
/// ```
/// use std::num::Float;
///
/// let two = 2.0;
///
/// // log2(2) - 1 == 0
/// let abs_difference = (two.log2() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn log2(self) -> Self;
/// Returns the base 10 logarithm of the number.
///
/// ```
/// use std::num::Float;
///
/// let ten = 10.0;
///
/// // log10(10) - 1 == 0
/// let abs_difference = (ten.log10() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn log10(self) -> Self;
/// Convert radians to degrees.
///
/// ```
/// use std::num::Float;
/// use std::f64::consts;
///
/// let angle = consts::PI;
///
/// let abs_difference = (angle.to_degrees() - 180.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "desirability is unclear")]
fn to_degrees(self) -> Self;
/// Convert degrees to radians.
///
/// ```
/// use std::num::Float;
/// use std::f64::consts;
///
/// let angle = 180.0;
///
/// let abs_difference = (angle.to_radians() - consts::PI).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "desirability is unclear")]
fn to_radians(self) -> Self;
/// Constructs a floating point number of `x*2^exp`.
///
/// ```
/// use std::num::Float;
///
/// // 3*2^2 - 12 == 0
/// let abs_difference = (Float::ldexp(3.0, 2) - 12.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "pending integer conventions")]
fn ldexp(x: Self, exp: isize) -> Self;
/// Breaks the number into a normalized fraction and a base-2 exponent,
/// satisfying:
///
/// * `self = x * 2^exp`
/// * `0.5 <= abs(x) < 1.0`
///
/// ```
/// use std::num::Float;
///
/// let x = 4.0;
///
/// // (1/2)*2^3 -> 1 * 8/2 -> 4.0
/// let f = x.frexp();
/// let abs_difference_0 = (f.0 - 0.5).abs();
/// let abs_difference_1 = (f.1 as f64 - 3.0).abs();
///
/// assert!(abs_difference_0 < 1e-10);
/// assert!(abs_difference_1 < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "pending integer conventions")]
fn frexp(self) -> (Self, isize);
/// Returns the next representable floating-point value in the direction of
/// `other`.
///
/// ```
/// use std::num::Float;
///
/// let x = 1.0f32;
///
/// let abs_diff = (x.next_after(2.0) - 1.00000011920928955078125_f32).abs();
///
/// assert!(abs_diff < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn next_after(self, other: Self) -> Self;
/// Returns the maximum of the two numbers.
///
/// ```
/// use std::num::Float;
///
/// let x = 1.0;
/// let y = 2.0;
///
/// assert_eq!(x.max(y), y);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn max(self, other: Self) -> Self;
/// Returns the minimum of the two numbers.
///
/// ```
/// use std::num::Float;
///
/// let x = 1.0;
/// let y = 2.0;
///
/// assert_eq!(x.min(y), x);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn min(self, other: Self) -> Self;
/// The positive difference of two numbers.
///
    /// * If `self <= other`: `0.0`
/// * Else: `self - other`
///
/// ```
/// use std::num::Float;
///
/// let x = 3.0;
/// let y = -3.0;
///
/// let abs_difference_x = (x.abs_sub(1.0) - 2.0).abs();
/// let abs_difference_y = (y.abs_sub(1.0) - 0.0).abs();
///
/// assert!(abs_difference_x < 1e-10);
/// assert!(abs_difference_y < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "may be renamed")]
fn abs_sub(self, other: Self) -> Self;
/// Take the cubic root of a number.
///
/// ```
/// use std::num::Float;
///
/// let x = 8.0;
///
/// // x^(1/3) - 2 == 0
/// let abs_difference = (x.cbrt() - 2.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "may be renamed")]
fn cbrt(self) -> Self;
/// Calculate the length of the hypotenuse of a right-angle triangle given
/// legs of length `x` and `y`.
///
/// ```
/// use std::num::Float;
///
/// let x = 2.0;
/// let y = 3.0;
///
/// // sqrt(x^2 + y^2)
/// let abs_difference = (x.hypot(y) - (x.powi(2) + y.powi(2)).sqrt()).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc",
reason = "unsure about its place in the world")]
fn hypot(self, other: Self) -> Self;
/// Computes the sine of a number (in radians).
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = f64::consts::PI/2.0;
///
/// let abs_difference = (x.sin() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn sin(self) -> Self;
/// Computes the cosine of a number (in radians).
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = 2.0*f64::consts::PI;
///
/// let abs_difference = (x.cos() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn cos(self) -> Self;
/// Computes the tangent of a number (in radians).
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = f64::consts::PI/4.0;
/// let abs_difference = (x.tan() - 1.0).abs();
///
/// assert!(abs_difference < 1e-14);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn tan(self) -> Self;
/// Computes the arcsine of a number. Return value is in radians in
/// the range [-pi/2, pi/2] or NaN if the number is outside the range
/// [-1, 1].
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let f = f64::consts::PI / 2.0;
///
/// // asin(sin(pi/2))
/// let abs_difference = (f.sin().asin() - f64::consts::PI / 2.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn asin(self) -> Self;
/// Computes the arccosine of a number. Return value is in radians in
/// the range [0, pi] or NaN if the number is outside the range
/// [-1, 1].
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let f = f64::consts::PI / 4.0;
///
/// // acos(cos(pi/4))
/// let abs_difference = (f.cos().acos() - f64::consts::PI / 4.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn acos(self) -> Self;
/// Computes the arctangent of a number. Return value is in radians in the
    /// range [-pi/2, pi/2].
///
/// ```
/// use std::num::Float;
///
/// let f = 1.0;
///
/// // atan(tan(1))
/// let abs_difference = (f.tan().atan() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn atan(self) -> Self;
/// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`).
///
/// * `x = 0`, `y = 0`: `0`
/// * `x >= 0`: `arctan(y/x)` -> `[-pi/2, pi/2]`
/// * `y >= 0`: `arctan(y/x) + pi` -> `(pi/2, pi]`
/// * `y < 0`: `arctan(y/x) - pi` -> `(-pi, -pi/2)`
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let pi = f64::consts::PI;
/// // All angles from horizontal right (+x)
/// // 45 deg counter-clockwise
/// let x1 = 3.0;
/// let y1 = -3.0;
///
/// // 135 deg clockwise
/// let x2 = -3.0;
/// let y2 = 3.0;
///
/// let abs_difference_1 = (y1.atan2(x1) - (-pi/4.0)).abs();
/// let abs_difference_2 = (y2.atan2(x2) - 3.0*pi/4.0).abs();
///
/// assert!(abs_difference_1 < 1e-10);
/// assert!(abs_difference_2 < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn atan2(self, other: Self) -> Self;
/// Simultaneously computes the sine and cosine of the number, `x`. Returns
/// `(sin(x), cos(x))`.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = f64::consts::PI/4.0;
/// let f = x.sin_cos();
///
/// let abs_difference_0 = (f.0 - x.sin()).abs();
/// let abs_difference_1 = (f.1 - x.cos()).abs();
///
/// assert!(abs_difference_0 < 1e-10);
    /// assert!(abs_difference_1 < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn sin_cos(self) -> (Self, Self);
/// Returns `e^(self) - 1` in a way that is accurate even if the
/// number is close to zero.
///
/// ```
/// use std::num::Float;
///
/// let x = 7.0;
///
/// // e^(ln(7)) - 1
/// let abs_difference = (x.ln().exp_m1() - 6.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "may be renamed")]
fn exp_m1(self) -> Self;
/// Returns `ln(1+n)` (natural logarithm) more accurately than if
/// the operations were performed separately.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let x = f64::consts::E - 1.0;
///
/// // ln(1 + (e - 1)) == ln(e) == 1
/// let abs_difference = (x.ln_1p() - 1.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[unstable(feature = "std_misc", reason = "may be renamed")]
fn ln_1p(self) -> Self;
/// Hyperbolic sine function.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let e = f64::consts::E;
/// let x = 1.0;
///
/// let f = x.sinh();
/// // Solving sinh() at 1 gives `(e^2-1)/(2e)`
/// let g = (e*e - 1.0)/(2.0*e);
/// let abs_difference = (f - g).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn sinh(self) -> Self;
/// Hyperbolic cosine function.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let e = f64::consts::E;
/// let x = 1.0;
/// let f = x.cosh();
/// // Solving cosh() at 1 gives this result
/// let g = (e*e + 1.0)/(2.0*e);
/// let abs_difference = (f - g).abs();
///
/// // Same result
/// assert!(abs_difference < 1.0e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn cosh(self) -> Self;
/// Hyperbolic tangent function.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let e = f64::consts::E;
/// let x = 1.0;
///
/// let f = x.tanh();
/// // Solving tanh() at 1 gives `(1 - e^(-2))/(1 + e^(-2))`
/// let g = (1.0 - e.powi(-2))/(1.0 + e.powi(-2));
/// let abs_difference = (f - g).abs();
///
/// assert!(abs_difference < 1.0e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn tanh(self) -> Self;
/// Inverse hyperbolic sine function.
///
/// ```
/// use std::num::Float;
///
/// let x = 1.0;
/// let f = x.sinh().asinh();
///
/// let abs_difference = (f - x).abs();
///
/// assert!(abs_difference < 1.0e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn asinh(self) -> Self;
/// Inverse hyperbolic cosine function.
///
/// ```
/// use std::num::Float;
///
/// let x = 1.0;
/// let f = x.cosh().acosh();
///
/// let abs_difference = (f - x).abs();
///
/// assert!(abs_difference < 1.0e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn acosh(self) -> Self;
/// Inverse hyperbolic tangent function.
///
/// ```
/// use std::num::Float;
/// use std::f64;
///
/// let e = f64::consts::E;
/// let f = e.tanh().atanh();
///
/// let abs_difference = (f - e).abs();
///
/// assert!(abs_difference < 1.0e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn atanh(self) -> Self;
}
/// Helper function for testing numeric operations
#[cfg(test)]
pub fn test_num<T>(ten: T, two: T) where
T: PartialEq + NumCast
+ Add<Output=T> + Sub<Output=T>
+ Mul<Output=T> + Div<Output=T>
+ Rem<Output=T> + Debug
+ Copy
{
assert_eq!(ten.add(two), cast(12).unwrap());
assert_eq!(ten.sub(two), cast(8).unwrap());
assert_eq!(ten.mul(two), cast(20).unwrap());
assert_eq!(ten.div(two), cast(5).unwrap());
assert_eq!(ten.rem(two), cast(0).unwrap());
assert_eq!(ten.add(two), ten + two);
assert_eq!(ten.sub(two), ten - two);
assert_eq!(ten.mul(two), ten * two);
assert_eq!(ten.div(two), ten / two);
assert_eq!(ten.rem(two), ten % two);
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use super::*;
use i8;
use i16;
use i32;
use i64;
use isize;
use u8;
use u16;
use u32;
use u64;
use usize;
macro_rules! test_cast_20 {
($_20:expr) => ({
let _20 = $_20;
assert_eq!(20usize, _20.to_uint().unwrap());
assert_eq!(20u8, _20.to_u8().unwrap());
assert_eq!(20u16, _20.to_u16().unwrap());
assert_eq!(20u32, _20.to_u32().unwrap());
assert_eq!(20u64, _20.to_u64().unwrap());
assert_eq!(20, _20.to_int().unwrap());
assert_eq!(20i8, _20.to_i8().unwrap());
assert_eq!(20i16, _20.to_i16().unwrap());
assert_eq!(20i32, _20.to_i32().unwrap());
assert_eq!(20i64, _20.to_i64().unwrap());
assert_eq!(20f32, _20.to_f32().unwrap());
assert_eq!(20f64, _20.to_f64().unwrap());
assert_eq!(_20, NumCast::from(20usize).unwrap());
assert_eq!(_20, NumCast::from(20u8).unwrap());
assert_eq!(_20, NumCast::from(20u16).unwrap());
assert_eq!(_20, NumCast::from(20u32).unwrap());
assert_eq!(_20, NumCast::from(20u64).unwrap());
assert_eq!(_20, NumCast::from(20).unwrap());
assert_eq!(_20, NumCast::from(20i8).unwrap());
assert_eq!(_20, NumCast::from(20i16).unwrap());
assert_eq!(_20, NumCast::from(20i32).unwrap());
assert_eq!(_20, NumCast::from(20i64).unwrap());
assert_eq!(_20, NumCast::from(20f32).unwrap());
assert_eq!(_20, NumCast::from(20f64).unwrap());
assert_eq!(_20, cast(20usize).unwrap());
assert_eq!(_20, cast(20u8).unwrap());
assert_eq!(_20, cast(20u16).unwrap());
assert_eq!(_20, cast(20u32).unwrap());
assert_eq!(_20, cast(20u64).unwrap());
assert_eq!(_20, cast(20).unwrap());
assert_eq!(_20, cast(20i8).unwrap());
assert_eq!(_20, cast(20i16).unwrap());
assert_eq!(_20, cast(20i32).unwrap());
assert_eq!(_20, cast(20i64).unwrap());
assert_eq!(_20, cast(20f32).unwrap());
assert_eq!(_20, cast(20f64).unwrap());
})
}
#[test] fn test_u8_cast() { test_cast_20!(20u8) }
#[test] fn test_u16_cast() { test_cast_20!(20u16) }
#[test] fn test_u32_cast() { test_cast_20!(20u32) }
#[test] fn test_u64_cast() { test_cast_20!(20u64) }
#[test] fn test_uint_cast() { test_cast_20!(20usize) }
#[test] fn test_i8_cast() { test_cast_20!(20i8) }
#[test] fn test_i16_cast() { test_cast_20!(20i16) }
#[test] fn test_i32_cast() { test_cast_20!(20i32) }
#[test] fn test_i64_cast() { test_cast_20!(20i64) }
#[test] fn test_int_cast() { test_cast_20!(20) }
#[test] fn test_f32_cast() { test_cast_20!(20f32) }
#[test] fn test_f64_cast() { test_cast_20!(20f64) }
#[test]
fn test_cast_range_int_min() {
assert_eq!(isize::MIN.to_int(), Some(isize::MIN as isize));
assert_eq!(isize::MIN.to_i8(), None);
assert_eq!(isize::MIN.to_i16(), None);
// isize::MIN.to_i32() is word-size specific
assert_eq!(isize::MIN.to_i64(), Some(isize::MIN as i64));
assert_eq!(isize::MIN.to_uint(), None);
assert_eq!(isize::MIN.to_u8(), None);
assert_eq!(isize::MIN.to_u16(), None);
assert_eq!(isize::MIN.to_u32(), None);
assert_eq!(isize::MIN.to_u64(), None);
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(isize::MIN.to_i32(), Some(isize::MIN as i32));
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(isize::MIN.to_i32(), None);
}
check_word_size();
}
#[test]
fn test_cast_range_i8_min() {
assert_eq!(i8::MIN.to_int(), Some(i8::MIN as isize));
assert_eq!(i8::MIN.to_i8(), Some(i8::MIN as i8));
assert_eq!(i8::MIN.to_i16(), Some(i8::MIN as i16));
assert_eq!(i8::MIN.to_i32(), Some(i8::MIN as i32));
assert_eq!(i8::MIN.to_i64(), Some(i8::MIN as i64));
assert_eq!(i8::MIN.to_uint(), None);
assert_eq!(i8::MIN.to_u8(), None);
assert_eq!(i8::MIN.to_u16(), None);
assert_eq!(i8::MIN.to_u32(), None);
assert_eq!(i8::MIN.to_u64(), None);
}
#[test]
fn test_cast_range_i16_min() {
assert_eq!(i16::MIN.to_int(), Some(i16::MIN as isize));
assert_eq!(i16::MIN.to_i8(), None);
assert_eq!(i16::MIN.to_i16(), Some(i16::MIN as i16));
assert_eq!(i16::MIN.to_i32(), Some(i16::MIN as i32));
assert_eq!(i16::MIN.to_i64(), Some(i16::MIN as i64));
assert_eq!(i16::MIN.to_uint(), None);
assert_eq!(i16::MIN.to_u8(), None);
assert_eq!(i16::MIN.to_u16(), None);
assert_eq!(i16::MIN.to_u32(), None);
assert_eq!(i16::MIN.to_u64(), None);
}
#[test]
fn test_cast_range_i32_min() {
assert_eq!(i32::MIN.to_int(), Some(i32::MIN as isize));
assert_eq!(i32::MIN.to_i8(), None);
assert_eq!(i32::MIN.to_i16(), None);
assert_eq!(i32::MIN.to_i32(), Some(i32::MIN as i32));
assert_eq!(i32::MIN.to_i64(), Some(i32::MIN as i64));
assert_eq!(i32::MIN.to_uint(), None);
assert_eq!(i32::MIN.to_u8(), None);
assert_eq!(i32::MIN.to_u16(), None);
assert_eq!(i32::MIN.to_u32(), None);
assert_eq!(i32::MIN.to_u64(), None);
}
#[test]
fn test_cast_range_i64_min() {
// i64::MIN.to_int() is word-size specific
assert_eq!(i64::MIN.to_i8(), None);
assert_eq!(i64::MIN.to_i16(), None);
assert_eq!(i64::MIN.to_i32(), None);
assert_eq!(i64::MIN.to_i64(), Some(i64::MIN as i64));
assert_eq!(i64::MIN.to_uint(), None);
assert_eq!(i64::MIN.to_u8(), None);
assert_eq!(i64::MIN.to_u16(), None);
assert_eq!(i64::MIN.to_u32(), None);
assert_eq!(i64::MIN.to_u64(), None);
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(i64::MIN.to_int(), None);
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(i64::MIN.to_int(), Some(i64::MIN as isize));
}
check_word_size();
}
#[test]
fn test_cast_range_int_max() {
assert_eq!(isize::MAX.to_int(), Some(isize::MAX as isize));
assert_eq!(isize::MAX.to_i8(), None);
assert_eq!(isize::MAX.to_i16(), None);
// isize::MAX.to_i32() is word-size specific
assert_eq!(isize::MAX.to_i64(), Some(isize::MAX as i64));
assert_eq!(isize::MAX.to_u8(), None);
assert_eq!(isize::MAX.to_u16(), None);
// isize::MAX.to_u32() is word-size specific
assert_eq!(isize::MAX.to_u64(), Some(isize::MAX as u64));
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(isize::MAX.to_i32(), Some(isize::MAX as i32));
assert_eq!(isize::MAX.to_u32(), Some(isize::MAX as u32));
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(isize::MAX.to_i32(), None);
assert_eq!(isize::MAX.to_u32(), None);
}
check_word_size();
}
#[test]
fn test_cast_range_i8_max() {
assert_eq!(i8::MAX.to_int(), Some(i8::MAX as isize));
assert_eq!(i8::MAX.to_i8(), Some(i8::MAX as i8));
assert_eq!(i8::MAX.to_i16(), Some(i8::MAX as i16));
assert_eq!(i8::MAX.to_i32(), Some(i8::MAX as i32));
assert_eq!(i8::MAX.to_i64(), Some(i8::MAX as i64));
assert_eq!(i8::MAX.to_uint(), Some(i8::MAX as usize));
assert_eq!(i8::MAX.to_u8(), Some(i8::MAX as u8));
assert_eq!(i8::MAX.to_u16(), Some(i8::MAX as u16));
assert_eq!(i8::MAX.to_u32(), Some(i8::MAX as u32));
assert_eq!(i8::MAX.to_u64(), Some(i8::MAX as u64));
}
#[test]
fn test_cast_range_i16_max() {
assert_eq!(i16::MAX.to_int(), Some(i16::MAX as isize));
assert_eq!(i16::MAX.to_i8(), None);
assert_eq!(i16::MAX.to_i16(), Some(i16::MAX as i16));
assert_eq!(i16::MAX.to_i32(), Some(i16::MAX as i32));
assert_eq!(i16::MAX.to_i64(), Some(i16::MAX as i64));
assert_eq!(i16::MAX.to_uint(), Some(i16::MAX as usize));
assert_eq!(i16::MAX.to_u8(), None);
assert_eq!(i16::MAX.to_u16(), Some(i16::MAX as u16));
assert_eq!(i16::MAX.to_u32(), Some(i16::MAX as u32));
assert_eq!(i16::MAX.to_u64(), Some(i16::MAX as u64));
}
#[test]
fn test_cast_range_i32_max() {
assert_eq!(i32::MAX.to_int(), Some(i32::MAX as isize));
assert_eq!(i32::MAX.to_i8(), None);
assert_eq!(i32::MAX.to_i16(), None);
assert_eq!(i32::MAX.to_i32(), Some(i32::MAX as i32));
assert_eq!(i32::MAX.to_i64(), Some(i32::MAX as i64));
assert_eq!(i32::MAX.to_uint(), Some(i32::MAX as usize));
assert_eq!(i32::MAX.to_u8(), None);
assert_eq!(i32::MAX.to_u16(), None);
assert_eq!(i32::MAX.to_u32(), Some(i32::MAX as u32));
assert_eq!(i32::MAX.to_u64(), Some(i32::MAX as u64));
}
#[test]
fn test_cast_range_i64_max() {
// i64::MAX.to_int() is word-size specific
assert_eq!(i64::MAX.to_i8(), None);
assert_eq!(i64::MAX.to_i16(), None);
assert_eq!(i64::MAX.to_i32(), None);
assert_eq!(i64::MAX.to_i64(), Some(i64::MAX as i64));
// i64::MAX.to_uint() is word-size specific
assert_eq!(i64::MAX.to_u8(), None);
assert_eq!(i64::MAX.to_u16(), None);
assert_eq!(i64::MAX.to_u32(), None);
assert_eq!(i64::MAX.to_u64(), Some(i64::MAX as u64));
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(i64::MAX.to_int(), None);
assert_eq!(i64::MAX.to_uint(), None);
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(i64::MAX.to_int(), Some(i64::MAX as isize));
assert_eq!(i64::MAX.to_uint(), Some(i64::MAX as usize));
}
check_word_size();
}
#[test]
fn test_cast_range_uint_min() {
assert_eq!(usize::MIN.to_int(), Some(usize::MIN as isize));
assert_eq!(usize::MIN.to_i8(), Some(usize::MIN as i8));
assert_eq!(usize::MIN.to_i16(), Some(usize::MIN as i16));
assert_eq!(usize::MIN.to_i32(), Some(usize::MIN as i32));
assert_eq!(usize::MIN.to_i64(), Some(usize::MIN as i64));
assert_eq!(usize::MIN.to_uint(), Some(usize::MIN as usize));
assert_eq!(usize::MIN.to_u8(), Some(usize::MIN as u8));
assert_eq!(usize::MIN.to_u16(), Some(usize::MIN as u16));
assert_eq!(usize::MIN.to_u32(), Some(usize::MIN as u32));
assert_eq!(usize::MIN.to_u64(), Some(usize::MIN as u64));
}
#[test]
fn test_cast_range_u8_min() {
assert_eq!(u8::MIN.to_int(), Some(u8::MIN as isize));
assert_eq!(u8::MIN.to_i8(), Some(u8::MIN as i8));
assert_eq!(u8::MIN.to_i16(), Some(u8::MIN as i16));
assert_eq!(u8::MIN.to_i32(), Some(u8::MIN as i32));
assert_eq!(u8::MIN.to_i64(), Some(u8::MIN as i64));
assert_eq!(u8::MIN.to_uint(), Some(u8::MIN as usize));
assert_eq!(u8::MIN.to_u8(), Some(u8::MIN as u8));
assert_eq!(u8::MIN.to_u16(), Some(u8::MIN as u16));
assert_eq!(u8::MIN.to_u32(), Some(u8::MIN as u32));
assert_eq!(u8::MIN.to_u64(), Some(u8::MIN as u64));
}
#[test]
fn test_cast_range_u16_min() {
assert_eq!(u16::MIN.to_int(), Some(u16::MIN as isize));
assert_eq!(u16::MIN.to_i8(), Some(u16::MIN as i8));
assert_eq!(u16::MIN.to_i16(), Some(u16::MIN as i16));
assert_eq!(u16::MIN.to_i32(), Some(u16::MIN as i32));
assert_eq!(u16::MIN.to_i64(), Some(u16::MIN as i64));
assert_eq!(u16::MIN.to_uint(), Some(u16::MIN as usize));
assert_eq!(u16::MIN.to_u8(), Some(u16::MIN as u8));
assert_eq!(u16::MIN.to_u16(), Some(u16::MIN as u16));
assert_eq!(u16::MIN.to_u32(), Some(u16::MIN as u32));
assert_eq!(u16::MIN.to_u64(), Some(u16::MIN as u64));
}
#[test]
fn test_cast_range_u32_min() {
assert_eq!(u32::MIN.to_int(), Some(u32::MIN as isize));
assert_eq!(u32::MIN.to_i8(), Some(u32::MIN as i8));
assert_eq!(u32::MIN.to_i16(), Some(u32::MIN as i16));
assert_eq!(u32::MIN.to_i32(), Some(u32::MIN as i32));
assert_eq!(u32::MIN.to_i64(), Some(u32::MIN as i64));
assert_eq!(u32::MIN.to_uint(), Some(u32::MIN as usize));
assert_eq!(u32::MIN.to_u8(), Some(u32::MIN as u8));
assert_eq!(u32::MIN.to_u16(), Some(u32::MIN as u16));
assert_eq!(u32::MIN.to_u32(), Some(u32::MIN as u32));
assert_eq!(u32::MIN.to_u64(), Some(u32::MIN as u64));
}
#[test]
fn test_cast_range_u64_min() {
assert_eq!(u64::MIN.to_int(), Some(u64::MIN as isize));
assert_eq!(u64::MIN.to_i8(), Some(u64::MIN as i8));
assert_eq!(u64::MIN.to_i16(), Some(u64::MIN as i16));
assert_eq!(u64::MIN.to_i32(), Some(u64::MIN as i32));
assert_eq!(u64::MIN.to_i64(), Some(u64::MIN as i64));
assert_eq!(u64::MIN.to_uint(), Some(u64::MIN as usize));
assert_eq!(u64::MIN.to_u8(), Some(u64::MIN as u8));
assert_eq!(u64::MIN.to_u16(), Some(u64::MIN as u16));
assert_eq!(u64::MIN.to_u32(), Some(u64::MIN as u32));
assert_eq!(u64::MIN.to_u64(), Some(u64::MIN as u64));
}
#[test]
fn test_cast_range_uint_max() {
assert_eq!(usize::MAX.to_int(), None);
assert_eq!(usize::MAX.to_i8(), None);
assert_eq!(usize::MAX.to_i16(), None);
assert_eq!(usize::MAX.to_i32(), None);
// usize::MAX.to_i64() is word-size specific
assert_eq!(usize::MAX.to_u8(), None);
assert_eq!(usize::MAX.to_u16(), None);
// usize::MAX.to_u32() is word-size specific
assert_eq!(usize::MAX.to_u64(), Some(usize::MAX as u64));
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(usize::MAX.to_u32(), Some(usize::MAX as u32));
assert_eq!(usize::MAX.to_i64(), Some(usize::MAX as i64));
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(usize::MAX.to_u32(), None);
assert_eq!(usize::MAX.to_i64(), None);
}
check_word_size();
}
#[test]
fn test_cast_range_u8_max() {
assert_eq!(u8::MAX.to_int(), Some(u8::MAX as isize));
assert_eq!(u8::MAX.to_i8(), None);
assert_eq!(u8::MAX.to_i16(), Some(u8::MAX as i16));
assert_eq!(u8::MAX.to_i32(), Some(u8::MAX as i32));
assert_eq!(u8::MAX.to_i64(), Some(u8::MAX as i64));
assert_eq!(u8::MAX.to_uint(), Some(u8::MAX as usize));
assert_eq!(u8::MAX.to_u8(), Some(u8::MAX as u8));
assert_eq!(u8::MAX.to_u16(), Some(u8::MAX as u16));
assert_eq!(u8::MAX.to_u32(), Some(u8::MAX as u32));
assert_eq!(u8::MAX.to_u64(), Some(u8::MAX as u64));
}
#[test]
fn test_cast_range_u16_max() {
assert_eq!(u16::MAX.to_int(), Some(u16::MAX as isize));
assert_eq!(u16::MAX.to_i8(), None);
assert_eq!(u16::MAX.to_i16(), None);
assert_eq!(u16::MAX.to_i32(), Some(u16::MAX as i32));
assert_eq!(u16::MAX.to_i64(), Some(u16::MAX as i64));
assert_eq!(u16::MAX.to_uint(), Some(u16::MAX as usize));
assert_eq!(u16::MAX.to_u8(), None);
assert_eq!(u16::MAX.to_u16(), Some(u16::MAX as u16));
assert_eq!(u16::MAX.to_u32(), Some(u16::MAX as u32));
assert_eq!(u16::MAX.to_u64(), Some(u16::MAX as u64));
}
#[test]
fn test_cast_range_u32_max() {
// u32::MAX.to_int() is word-size specific
assert_eq!(u32::MAX.to_i8(), None);
assert_eq!(u32::MAX.to_i16(), None);
assert_eq!(u32::MAX.to_i32(), None);
assert_eq!(u32::MAX.to_i64(), Some(u32::MAX as i64));
assert_eq!(u32::MAX.to_uint(), Some(u32::MAX as usize));
assert_eq!(u32::MAX.to_u8(), None);
assert_eq!(u32::MAX.to_u16(), None);
assert_eq!(u32::MAX.to_u32(), Some(u32::MAX as u32));
assert_eq!(u32::MAX.to_u64(), Some(u32::MAX as u64));
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(u32::MAX.to_int(), None);
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(u32::MAX.to_int(), Some(u32::MAX as isize));
}
check_word_size();
}
#[test]
fn test_cast_range_u64_max() {
assert_eq!(u64::MAX.to_int(), None);
assert_eq!(u64::MAX.to_i8(), None);
assert_eq!(u64::MAX.to_i16(), None);
assert_eq!(u64::MAX.to_i32(), None);
assert_eq!(u64::MAX.to_i64(), None);
// u64::MAX.to_uint() is word-size specific
assert_eq!(u64::MAX.to_u8(), None);
assert_eq!(u64::MAX.to_u16(), None);
assert_eq!(u64::MAX.to_u32(), None);
assert_eq!(u64::MAX.to_u64(), Some(u64::MAX as u64));
#[cfg(target_pointer_width = "32")]
fn check_word_size() {
assert_eq!(u64::MAX.to_uint(), None);
}
#[cfg(target_pointer_width = "64")]
fn check_word_size() {
assert_eq!(u64::MAX.to_uint(), Some(u64::MAX as usize));
}
check_word_size();
}
#[test]
fn test_saturating_add_uint() {
use usize::MAX;
assert_eq!(3_usize.saturating_add(5_usize), 8_usize);
assert_eq!(3_usize.saturating_add(MAX-1), MAX);
assert_eq!(MAX.saturating_add(MAX), MAX);
assert_eq!((MAX-2).saturating_add(1), MAX-1);
}
#[test]
fn test_saturating_sub_uint() {
use usize::MAX;
assert_eq!(5_usize.saturating_sub(3_usize), 2_usize);
assert_eq!(3_usize.saturating_sub(5_usize), 0_usize);
assert_eq!(0_usize.saturating_sub(1_usize), 0_usize);
assert_eq!((MAX-1).saturating_sub(MAX), 0);
}
#[test]
fn test_saturating_add_int() {
use isize::{MIN,MAX};
assert_eq!(3.saturating_add(5), 8);
assert_eq!(3.saturating_add(MAX-1), MAX);
assert_eq!(MAX.saturating_add(MAX), MAX);
assert_eq!((MAX-2).saturating_add(1), MAX-1);
assert_eq!(3.saturating_add(-5), -2);
assert_eq!(MIN.saturating_add(-1), MIN);
assert_eq!((-2).saturating_add(-MAX), MIN);
}
#[test]
fn test_saturating_sub_int() {
use isize::{MIN,MAX};
assert_eq!(3.saturating_sub(5), -2);
assert_eq!(MIN.saturating_sub(1), MIN);
assert_eq!((-2).saturating_sub(MAX), MIN);
assert_eq!(3.saturating_sub(-5), 8);
assert_eq!(3.saturating_sub(-(MAX-1)), MAX);
assert_eq!(MAX.saturating_sub(-MAX), MAX);
assert_eq!((MAX-2).saturating_sub(-1), MAX-1);
}
#[test]
fn test_checked_add() {
let five_less = usize::MAX - 5;
assert_eq!(five_less.checked_add(0), Some(usize::MAX - 5));
assert_eq!(five_less.checked_add(1), Some(usize::MAX - 4));
assert_eq!(five_less.checked_add(2), Some(usize::MAX - 3));
assert_eq!(five_less.checked_add(3), Some(usize::MAX - 2));
assert_eq!(five_less.checked_add(4), Some(usize::MAX - 1));
assert_eq!(five_less.checked_add(5), Some(usize::MAX));
assert_eq!(five_less.checked_add(6), None);
assert_eq!(five_less.checked_add(7), None);
}
#[test]
fn test_checked_sub() {
assert_eq!(5_usize.checked_sub(0), Some(5));
assert_eq!(5_usize.checked_sub(1), Some(4));
assert_eq!(5_usize.checked_sub(2), Some(3));
assert_eq!(5_usize.checked_sub(3), Some(2));
assert_eq!(5_usize.checked_sub(4), Some(1));
assert_eq!(5_usize.checked_sub(5), Some(0));
assert_eq!(5_usize.checked_sub(6), None);
assert_eq!(5_usize.checked_sub(7), None);
}
#[test]
fn test_checked_mul() {
let third = usize::MAX / 3;
assert_eq!(third.checked_mul(0), Some(0));
assert_eq!(third.checked_mul(1), Some(third));
assert_eq!(third.checked_mul(2), Some(third * 2));
assert_eq!(third.checked_mul(3), Some(third * 3));
assert_eq!(third.checked_mul(4), None);
}
macro_rules! test_is_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).is_power_of_two(), false);
assert_eq!((1 as $T).is_power_of_two(), true);
assert_eq!((2 as $T).is_power_of_two(), true);
assert_eq!((3 as $T).is_power_of_two(), false);
assert_eq!((4 as $T).is_power_of_two(), true);
assert_eq!((5 as $T).is_power_of_two(), false);
assert_eq!(($T::MAX / 2 + 1).is_power_of_two(), true);
}
)
}
test_is_power_of_two!{ test_is_power_of_two_u8, u8 }
test_is_power_of_two!{ test_is_power_of_two_u16, u16 }
test_is_power_of_two!{ test_is_power_of_two_u32, u32 }
test_is_power_of_two!{ test_is_power_of_two_u64, u64 }
test_is_power_of_two!{ test_is_power_of_two_uint, usize }
macro_rules! test_next_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).next_power_of_two(), 1);
let mut next_power = 1;
for i in range::<$T>(1, 40) {
assert_eq!(i.next_power_of_two(), next_power);
if i == next_power { next_power *= 2 }
}
}
)
}
test_next_power_of_two! { test_next_power_of_two_u8, u8 }
test_next_power_of_two! { test_next_power_of_two_u16, u16 }
test_next_power_of_two! { test_next_power_of_two_u32, u32 }
test_next_power_of_two! { test_next_power_of_two_u64, u64 }
test_next_power_of_two! { test_next_power_of_two_uint, usize }
macro_rules! test_checked_next_power_of_two {
($test_name:ident, $T:ident) => (
fn $test_name() {
#![test]
assert_eq!((0 as $T).checked_next_power_of_two(), Some(1));
assert!(($T::MAX / 2).checked_next_power_of_two().is_some());
assert_eq!(($T::MAX - 1).checked_next_power_of_two(), None);
assert_eq!($T::MAX.checked_next_power_of_two(), None);
let mut next_power = 1;
for i in range::<$T>(1, 40) {
assert_eq!(i.checked_next_power_of_two(), Some(next_power));
if i == next_power { next_power *= 2 }
}
}
)
}
test_checked_next_power_of_two! { test_checked_next_power_of_two_u8, u8 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u16, u16 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u32, u32 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_u64, u64 }
test_checked_next_power_of_two! { test_checked_next_power_of_two_uint, usize }
#[derive(PartialEq, Debug)]
struct Value { x: isize }
impl ToPrimitive for Value {
fn to_i64(&self) -> Option<i64> { self.x.to_i64() }
fn to_u64(&self) -> Option<u64> { self.x.to_u64() }
}
impl FromPrimitive for Value {
fn from_i64(n: i64) -> Option<Value> { Some(Value { x: n as isize }) }
fn from_u64(n: u64) -> Option<Value> { Some(Value { x: n as isize }) }
}
#[test]
fn test_to_primitive() {
let value = Value { x: 5 };
assert_eq!(value.to_int(), Some(5));
assert_eq!(value.to_i8(), Some(5));
assert_eq!(value.to_i16(), Some(5));
assert_eq!(value.to_i32(), Some(5));
assert_eq!(value.to_i64(), Some(5));
assert_eq!(value.to_uint(), Some(5));
assert_eq!(value.to_u8(), Some(5));
assert_eq!(value.to_u16(), Some(5));
assert_eq!(value.to_u32(), Some(5));
assert_eq!(value.to_u64(), Some(5));
assert_eq!(value.to_f32(), Some(5f32));
assert_eq!(value.to_f64(), Some(5f64));
}
#[test]
fn test_from_primitive() {
assert_eq!(from_int(5), Some(Value { x: 5 }));
assert_eq!(from_i8(5), Some(Value { x: 5 }));
assert_eq!(from_i16(5), Some(Value { x: 5 }));
assert_eq!(from_i32(5), Some(Value { x: 5 }));
assert_eq!(from_i64(5), Some(Value { x: 5 }));
assert_eq!(from_uint(5), Some(Value { x: 5 }));
assert_eq!(from_u8(5), Some(Value { x: 5 }));
assert_eq!(from_u16(5), Some(Value { x: 5 }));
assert_eq!(from_u32(5), Some(Value { x: 5 }));
assert_eq!(from_u64(5), Some(Value { x: 5 }));
assert_eq!(from_f32(5f32), Some(Value { x: 5 }));
assert_eq!(from_f64(5f64), Some(Value { x: 5 }));
}
#[test]
fn test_pow() {
fn naive_pow<T: Int>(base: T, exp: usize) -> T {
let one: T = Int::one();
(0..exp).fold(one, |acc, _| acc * base)
}
macro_rules! assert_pow {
(($num:expr, $exp:expr) => $expected:expr) => {{
let result = $num.pow($exp);
assert_eq!(result, $expected);
assert_eq!(result, naive_pow($num, $exp));
}}
}
assert_pow!((3, 0 ) => 1);
assert_pow!((5, 1 ) => 5);
assert_pow!((-4, 2 ) => 16);
assert_pow!((8, 3 ) => 512);
assert_pow!((2u64, 50) => 1125899906842624);
}
#[test]
fn test_uint_to_str_overflow() {
let mut u8_val: u8 = 255;
assert_eq!(u8_val.to_string(), "255");
u8_val = u8_val.wrapping_add(1);
assert_eq!(u8_val.to_string(), "0");
let mut u16_val: u16 = 65_535;
assert_eq!(u16_val.to_string(), "65535");
u16_val = u16_val.wrapping_add(1);
assert_eq!(u16_val.to_string(), "0");
let mut u32_val: u32 = 4_294_967_295;
assert_eq!(u32_val.to_string(), "4294967295");
u32_val = u32_val.wrapping_add(1);
assert_eq!(u32_val.to_string(), "0");
let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(u64_val.to_string(), "18446744073709551615");
u64_val = u64_val.wrapping_add(1);
assert_eq!(u64_val.to_string(), "0");
}
fn from_str<T: ::str::FromStr>(t: &str) -> Option<T> {
::str::FromStr::from_str(t).ok()
}
#[test]
fn test_uint_from_str_overflow() {
let mut u8_val: u8 = 255;
assert_eq!(from_str::<u8>("255"), Some(u8_val));
assert_eq!(from_str::<u8>("256"), None);
u8_val = u8_val.wrapping_add(1);
assert_eq!(from_str::<u8>("0"), Some(u8_val));
assert_eq!(from_str::<u8>("-1"), None);
let mut u16_val: u16 = 65_535;
assert_eq!(from_str::<u16>("65535"), Some(u16_val));
assert_eq!(from_str::<u16>("65536"), None);
u16_val = u16_val.wrapping_add(1);
assert_eq!(from_str::<u16>("0"), Some(u16_val));
assert_eq!(from_str::<u16>("-1"), None);
let mut u32_val: u32 = 4_294_967_295;
assert_eq!(from_str::<u32>("4294967295"), Some(u32_val));
assert_eq!(from_str::<u32>("4294967296"), None);
u32_val = u32_val.wrapping_add(1);
assert_eq!(from_str::<u32>("0"), Some(u32_val));
assert_eq!(from_str::<u32>("-1"), None);
let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(from_str::<u64>("18446744073709551615"), Some(u64_val));
assert_eq!(from_str::<u64>("18446744073709551616"), None);
u64_val = u64_val.wrapping_add(1);
assert_eq!(from_str::<u64>("0"), Some(u64_val));
assert_eq!(from_str::<u64>("-1"), None);
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use num::Int;
use prelude::v1::*;
#[bench]
fn bench_pow_function(b: &mut Bencher) {
let v = (0..1024).collect::<Vec<_>>();
b.iter(|| {v.iter().fold(0, |old, new| old.pow(*new as u32));});
}
}<|fim▁end|> | ///
/// // (8388608, -22, 1) |
<|file_name|>gameprofile.py<|end_file_name|><|fim▁begin|># Orbotor - arcade with orbit mechanics
# Copyright (C) 2014 mr555ru
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys # NOQA
import profile
import ConfigParser
import pygame
from pygame import *
from static_functions import *
import camera as camera
import planet as planet
from orbitable import GCD_Singleton, SoundSystem_Singleton
from helldebris_collection import HellDebrisCollection
from team import Team
from simplestats import SimpleStats
wwidth = 1024
wheight = 768
p1_name = "Player1"
p2_name = "Player2"
config = ConfigParser.RawConfigParser()
config.read('profile.cfg')
wwidth = config.getint("Screen", "width")
wheight = config.getint("Screen", "height")
p1_name = config.get("Player", "P1_name")
p2_name = config.get("Player", "P2_name")
display = (wwidth, wheight)
clock = pygame.time.Clock()
class Profile():
def __init__(self, is_player2_present=False,
is_player1_ai=False,
is_player2_ai=False,
player1_team="Green",
player2_team="Red",
greenteamsize=8,
redteamsize=8,
debris_min=6,
debris_max=20,
draw_planet=False,
name=""):
self.p2 = is_player2_present
self.p1_ai = is_player1_ai
self.p2_ai = is_player2_ai
self.p1_team = player1_team
self.p2_team = player2_team
mingreen = int(self.p1_team == "Green") + int(self.p2_team == "Green" and self.p2)
minred = int(self.p1_team == "Red") + int(self.p2_team == "Red" and self.p2)
self.green = max(mingreen, greenteamsize)
self.red = max(minred, redteamsize)
self.hellmin = debris_min
self.hellmax = debris_max
self.draw_planet = draw_planet
self.name = name
self.ERAD = 1000
self.MAXRAD = 1700
self.ORBHEIGHT = 350
def game_init(self):
pygame.init()
self.PROFILESTEP = False
self.UPDATE_GAME = pygame.USEREVENT + 1
pygame.time.set_timer(self.UPDATE_GAME, GAME_SPEED)
self.screen = pygame.display.set_mode(display)
if self.p2:
self.bg1 = Surface((wwidth, wheight/2))
self.bg2 = Surface((wwidth, wheight/2))
self.cam2 = camera.Camera(self.bg2, first_in_order=False)
self.bgs = (self.bg1, self.bg2)
else:
self.bg1 = Surface((wwidth, wheight))
self.bgs = (self.bg1,)
self.cam1 = camera.Camera(self.bg1)
if self.name == "":
pygame.display.set_caption("Orbotor")
else:
pygame.display.set_caption("Orbotor - %s" % self.name)
self.pl = planet.Planet(self.bgs, self.ERAD, self.MAXRAD, "planet.png" if self.draw_planet else None)
GCD_Singleton.set_planet(self.pl)
self.soundsys = SoundSystem_Singleton
self.spawn = (self.ERAD+self.ORBHEIGHT, 0)
self.team1 = Team("Green", "#009900", self.green, self.spawn, self.pl)
self.team2 = Team("Red", "#880000", self.red, self.spawn, self.pl)
self.team1.set_opponent_team(self.team2)
self.team2.set_opponent_team(self.team1)
self.hell = HellDebrisCollection(self.spawn, self.pl, self.hellmin, self.hellmax)
if self.p1_team == "Green":
self.player1 = self.team1.guys[0]
if self.p2:
if self.p2_team == "Green":
self.player2 = self.team1.guys[1]
elif self.p2_team == "Red":
self.player2 = self.team2.guys[0]
else:
raise Exception("unknown team for p2: %s" % self.p2_team)
elif self.p1_team == "Red":
self.player1 = self.team2.guys[0]
if self.p2:
if self.p2_team == "Green":
self.player2 = self.team1.guys[0]
elif self.p2_team == "Red":
self.player2 = self.team2.guys[1]
else:
raise Exception("unknown team for p2: %s" % self.p2_team)
else:
raise Exception("unknown team for p1: %s" % self.p1_team)
self.player1.is_ai = self.p1_ai
if self.p1_ai:
self.player1.set_name("[bot] %s" % p1_name)
else:
self.player1.set_name("%s" % p1_name)
if self.p2:
self.player2.is_ai = self.p2_ai
if self.p2_ai:
self.player2.set_name("[bot] %s" % p2_name)
else:
self.player2.set_name("%s" % p2_name)
self.stats1 = SimpleStats(self.team1, self.team2, self.player1)
if self.p2:
self.stats2 = SimpleStats(self.team1, self.team2, self.player2)
def game_key_listen(self, event):
if event.type == KEYDOWN and event.key == K_F1:
self.PROFILESTEP = True
self.game_step()
elif event.type == KEYDOWN and event.key == K_F2:
print len(GCD_Singleton.orbitables)
elif event.type == KEYDOWN and event.key == K_F5:
self.soundsys.switch()
if not self.p1_ai:
self.player1.catch_kb_event(event)
if self.p2 and not self.p2_ai:
self.player2.catch_kb_event_hotseat(event)
self.cam1.keys_listen(event)
if self.p2:
self.cam2.keys_listen_hotseat(event)
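# When PROFILESTEP is armed (via F1), the next step/draw pair runs under
# the profiler; see game_step()/game_draw() below.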
def game_step(self):
if self.PROFILESTEP:
profile.runctx("self._step()", globals(), {"self": self})
else:
self._step()
def _step(self):
self.team2.step() # todo faster better stronger
self.team1.step()
self.hell.step()
self.player1.focus(self.cam1)
self.cam1.step()
if self.p2:
self.player2.focus(self.cam2)
self.cam2.step()
GCD_Singleton.step()
def game_draw(self):
if self.PROFILESTEP:
profile.runctx("self._draw()", globals(), {"self": self})
self.PROFILESTEP = False
else:
self._draw()
def _draw(self):
clock.tick(60)
tup = [self.pl, ] + self.team1.objectslist() + self.team2.objectslist()\
+ self.hell.objectslist() + self.pl.cities
tup = tuple(tup)
self.cam1.translate_coords(*tup)
if self.p2:
self.cam2.translate_coords(*tup)
self.stats1.draw(self.bg1)
self.screen.blit(self.bg1, (0, 0))
if self.p2:
self.stats2.draw(self.bg2)
self.screen.blit(self.bg2, (0, wheight/2))<|fim▁hole|>
def DefaultProfile(draw_planet, hell):
return Profile(draw_planet=draw_planet, debris_min=hell[0], debris_max=hell[1])
def HotseatProfile(draw_planet, hell):
return Profile(is_player2_present=True, draw_planet=draw_planet,
debris_min=hell[0], debris_max=hell[1])
def RivalProfile(draw_planet, hell):
return Profile(is_player2_present=True, is_player2_ai=True, draw_planet=draw_planet,
debris_min=hell[0], debris_max=hell[1])
def CoopProfile(draw_planet, hell):
return Profile(is_player2_present=True, player2_team="Green", draw_planet=draw_planet,
debris_min=hell[0], debris_max=hell[1])
def SpectateProfile(draw_planet, hell):
return Profile(is_player1_ai=True, draw_planet=draw_planet,
debris_min=hell[0], debris_max=hell[1])
def SurvivalProfile(draw_planet):
return Profile(draw_planet=draw_planet, debris_min=35, debris_max=70,
greenteamsize=1, redteamsize=0)
def CoopSurvivalProfile(draw_planet):
return Profile(is_player2_present=True, player2_team="Green", draw_planet=draw_planet,
debris_min=35, debris_max=70, greenteamsize=2, redteamsize=0)<|fim▁end|> |
pygame.display.update() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2014-2015 University of Chicago
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Repository Management
"""
import datetime
import fnmatch
import hashlib
import os
import os.path
import re
default_root = "/mcs/globus.org/ftppub/gt6"
default_api_root = "/mcs/globus.org/api"
default_releases = ["unstable", "testing", "stable"]
public_key = """-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1.4.5 (GNU/Linux)
mQGiBE0PXQkRBAC12PfwFzMyTKAvCp3AEbzdwwDyEaBHYmd1+Dv+q5c48fEZQrzA
PuZ75BnG8BRIo3ZSYJll9Xf5v8A0M6F35msBBdjUpI+PHZvSQ+yru6U3w9XCsmO9
jSGWM1XAw/hcDWOsETOsjJ56AqIKndOXtG2jeOMFD0MwJus9paDcv5pPkwCgk3Fk
I+GdLaZf0O6vGUtq2Fo2EgkD/14AQ4SyUufwztQeLwlYXyihdUoIVBl4wm4fndJb
TuzTlp3V/oabM8t+V92ftbqlAesFb1FdFQ9NeUEY0VIODR2OTsmEfLUSMK/kRfXM
4FatObXpEp58EydZb3oz/vwASEk1Nno5OW2noGZL3sCk+3j65MstI2q4kMvNSvl+
JEjUBACgNv/mDrn0UjWBuzxOuZrh1r2rBdsjIHx31o/vBF5YLfQhErZQTm6cfpRK
W32Nm18btrqgxxHFAMb4wxnVxAxdM3zLSAaiqvi33z2wHReh5TfaVKpJBj7LpMSI
hwu50iovsBjE7HiusJQBWBtk8Bqp4g9ic2sPV0caEMCUXU5R9bQjR2xvYnVzIFRv
b2xraXQgPHN1cHBvcnRAZ2xvYnVzLm9yZz6IYAQTEQIAIAUCTQ9dCQIbAwYLCQgH
AwIEFQIIAwQWAgMBAh4BAheAAAoJEESufsL68kNlb6IAoIemS8dr65xCkA4GQzgJ
ngXwZgtvAKCOKs5Ork6HiNKIrWRGMLvA7iktBbkCDQRND10SEAgA37cRQGj/QNcc
OjyBrL6e2wPT7UtpXBEHzfjhtmT8+VC+PSbKRxVfawLBtrfzSAAwsmye3c+XK/VB
Pa06vSSmezeyNau+XtEVLwrwQwO/kM6wgNtb7zYyI67Y6XEPP+ZlqpZ0W14cTZBD
3SXWuu6zqjdtUnJCg/j/j0zH5TZa40aCfisERxNCQeoePk2gmMTJDJF0ASM3Nhys
QIP9qpCA+eOJnKmMeEgDCW9j2mYO4tp9lCSbi15HAb41HKN6xypNWk+EHKyu9n50
88UocRHXLZFujzNTGIokWAcoC0D3qpVQehtAVgt1VPrE6MxFPek8ZN4Ho++92KB7
F6E0OsfF6wADBggAnNPguzYAIztF/EzZANUU/7Eon9zJaD4Lf/mnhB3bMuGvenY0
7HSBAXbUxVXs7uX3S6u9PZ9dytl2Fqh8w47TNcC0ACKLRnhxTJ92LLakzAGVGtNz
2W9l+YJaZ6qIQR9FmYpCyIWp6Vm47yOARThrMtnwUhb53g5ZfxgzpHNUDN/7utTy
3sUaMRiijecmSVhDFbrz7ryY2Btlcr7ZrBo0ODHohDkZVn2UrzE6qg9g5np03zYe
5OUM5Lt5GYZJSKZO81aJ5+9DlkiAev3BFEeCsSOwjrqLZpsr0olbIfeHCi8pvjOJ
SCfx4Qs/hI34ykaUn3AgbgxqT0mSKfMasg2bIIhJBBgRAgAJBQJND10SAhsMAAoJ
EESufsL68kNlBuAAnRRI5jFAvyjtQaoQpVqSL4/O45D7AJ9WrW/vxTzN0OyZyUU6
8T0dJyXArA==
=r6rU
-----END PGP PUBLIC KEY BLOCK-----
"""
uid = os.getuid()
gid = None
def _digest_file(filename, force=False):
"""
Compute the md5, sha1, sha512 hashes of a file and write them to disk.
Parameters
----------
*filename*::
Name of the file to compute the hash of (str)
*force*::
Overwrite existing hash file (bool [False])
"""
if fnmatch.fnmatch(filename, "*.md5") or \
fnmatch.fnmatch(filename, "*.sha1") or \
fnmatch.fnmatch(filename, "*.sha512"):
return
for h in ['md5', 'sha1', 'sha512']:
hashname = filename + "." + h
if (force
or not os.path.exists(hashname)
or os.path.getmtime(filename) > os.path.getmtime(hashname)):
digester = hashlib.new(h)
f = file(filename, "r")
digester.update(f.read())
f.close()
f = file(hashname, "w")
f.write(
"%s %s\n" %
(digester.hexdigest(), filename.split(os.sep)[-1]))
f.close()
class Repository(object):
"""
Repository class
===================
This class contains the generic package management features for the various
metadata types associated with different repository systems. It contains
algorithms for matching packages and selecting ones to copy into another
repository based on version matches. This is subclassed to implement the
actual metadata parsing for various metadata formats.
"""
def __init__(self):
self.packages = {}
def get_packages(
self, name=None, arch=None, version=None, source=None,
newest_only=False):
"""
Construct a list of packages that match the optional parameters. If
source is a Metadata object, match packages that have that package
as the source package. Otherwise, filter the package list based on
the name if not None, further filtering on version and arch if they
are not None. If newest_only is True, only return the highest versions
of the packages which match.
"""
package_candidates = []
if source is not None:
return [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if package.source_name == source.source_name
and package.version == source.version
]
elif name is not None:
if version is not None:
package_candidates = [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if name == package.name
and package.version == version
]
else:
package_candidates = [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if name == package.name
]
if arch is not None:
package_candidates = [
(p)
for p in package_candidates if p.arch == arch
]
if newest_only and len(package_candidates) > 0:
newv = package_candidates[-1].version
return [p for p in package_candidates if p.version == newv]
elif newest_only:<|fim▁hole|> package_candidates = []
for n in self.packages:
package_candidates.extend(
self.get_packages(
name=n, arch=arch, newest_only=newest_only))
return package_candidates
def is_newer(self, pkg):
"""
Check to see if *pkg* is newer than any versions of the same package
name within this repository. Returns 'True' if it is, 'False'
otherwise.
Parameters
----------
*self*:
This Repository object
*pkg*:
Package metadata to compare against the versions in *self*.
Returns
-------
Boolean
"""
matches = self.get_packages(pkg.name, arch=pkg.arch, newest_only=True)
return matches == [] or pkg > matches[-1]
def __contains__(self, pkg):
"""
Check to see if pkg is included in this Repository
"""
return len(self.get_packages(
name=pkg.name, arch=pkg.arch,
version=pkg.version, newest_only=True)) > 0
def __iter__(self):
"""
Iterate through the packages in this repository
"""
return self.packages.keys()
@staticmethod
def create_index(path, recursive=False):
for root, dirs, filenames in os.walk(path, topdown=not recursive):
if not recursive:
del dirs[0:]
indexfile = os.path.join(root, "index.html")
index_mtime = 0
regenerate_index = False
if os.path.exists(indexfile):
index_mtime = os.stat(indexfile).st_mtime
else:
regenerate_index = True
if not regenerate_index:
for dir in dirs:
fulldir = os.path.join(root, dir)
if os.stat(fulldir).st_mtime >= index_mtime:
regenerate_index = True
break
if not regenerate_index:
for filename in filenames:
fullfilename = os.path.join(root, filename)
if os.stat(fullfilename).st_mtime >= index_mtime:
regenerate_index = True
break
if regenerate_index:
try:
f = open(indexfile, "w")
f.write(
"<html><head><title>{0}</title></head>\n"
"<body>"
"<table>\n"
"<tr>"
"<td><a href='../index.html'>Parent Directory</a></td>"
"<td>{1}</td></tr>\n"
.format(
os.path.basename(root),
datetime.datetime.fromtimestamp(
os.stat(
os.path.join(
root, "..")).st_mtime).isoformat()))
dirs.sort()
for dir in dirs:
f.write(
"<tr>"
"<td><a href='{0}/index.html'>{0}/</a></td>"
"<td>{1}/</td></tr>\n"
.format(
dir,
datetime.datetime.fromtimestamp(
os.stat(
os.path.join(root, dir)).st_mtime
).isoformat()))
filenames.sort()
for pkg in filenames:
pkg_filename = os.path.join(root, pkg)
if (os.path.isfile(pkg_filename)
and not pkg_filename.endswith(".html")):
f.write(
"<tr>"
"<td><a href='{0}'>{0}</a></td>"
"<td>{1}</td></tr>\n"
.format(
pkg,
datetime.datetime.fromtimestamp(
os.stat(
pkg_filename).st_mtime
).isoformat()))
f.write("</table></body></html>\n")
finally:
f.close()
os.utime(root, None)
class Release(object):
"""
A Release is a top-level collection of +repo.Repository+ objects for
a particular package stability level ('unstable', 'testing', 'stable')
for each operating system.
"""
def __init__(self, name, repositories):
self.name = name
self.repositories = repositories
def get_packages(
self, name=None, os=None, version=None, arch=None,
source=None, newest_only=False):
return [p
for repository in self.repositories_for_os_arch(os, arch)
for p in repository.get_packages(
name=name, arch=arch, version=version, source=source,
newest_only=newest_only)]
def is_newer(self, package):
for repository in self.repositories_for_package(package):
if repository.is_newer(package):
return True
return False
def add_package(self, package, update_metadata=False):
return [
repository.add_package(package, update_metadata)
for repository in self.repositories_for_package(package)]
def remove_package(self, package, update_metadata=False):
return [
repository.remove_package(package, update_metadata)
for repository in self.repositories_for_package(package)]
def update_metadata(self, osname=None, arch=None, force=False):
for repository in self.repositories_for_os_arch(osname, arch):
repository.update_metadata(force)
def repositories_for_os_arch(self, osname, arch):
if osname is not None:
if arch is not None:
return [self.repositories[osname][arch]]
else:
return [
self.repositories[osname][ar]
for ar in self.repositories[osname]
]
else:
return [
self.repositories[osn][ar]
for osn in self.repositories
for ar in self.repositories[osn]
]
def repositories_for_package(self, package):
"""
Returns a list of repositories where the given package would belong.
By default, it's a list containing the repository that matches the
package's os and arch, but subclasses can override this.
"""
if package.os in self.repositories:
return [self.repositories[package.os][package.arch]]
else:
return []
def get_operating_systems(self):
return self.repositories.keys()
def get_architectures(self, osname):
return self.repositories[osname].keys()
def __contains__(self, package):
return len(self.get_packages(
name=package.name, os=package.os, version=package.version,
arch=package.arch)) > 0
class Manager(object):
def __init__(self, releases):
self.releases = releases
def get_release(self, releasename):
return self.releases[releasename]
def package_name(self, name):
return name.replace("_", "-") if name is not None else None
def promote_packages(
self, from_release=None,
to_release="unstable", os=None, name=None, version=None,
dryrun=False, exclude_package_names=None):
"""
Find new packages in *from_release* that are not in *to_release*,
copy them there, and update the distro metadata. The packages to
promote can be limited by specifying the package *name*, *version*,
and a particular *os* to update.
Parameters
----------
*from_release*::
The name of a release in this Manager object to copy new packages
from.
*to_release*::
The name of a release in this Manager object
to copy new packages into.
*os*::
Optional operating system indicator (either version or codename)
to restrict the package promotion to.
*name*::
Optional name of the packages to copy. If this is not present, all
packages that have a newer source version in *from_release* than
*to_release* are copied.
*version*::
Optional version of the packages to copy. This is only used if the
*name* option is used to additionally limit the packages to copy.
*dryrun*::
(Optional) If True, only compute which packages are eligible for
promotion; nothing is copied and no metadata is updated.
*exclude_package_names*::
(Optional) List of regular expressions matching packages to
exclude from the promotion list.
Returns
-------
This function returns a list of the packages that were promoted
(or, when *dryrun* is True, that would have been promoted)
"""
from_release = self.get_release(from_release)
# Find source packages in the from_release that are newer versions than
# those in the to_release
src_candidates = [src_info for src_info in from_release.get_packages(
name=self.package_name(name), os=os, version=version,
newest_only=(version is None))]
src_candidates_by_os = {}
for src in src_candidates:
source_and_os = "{0}:{1}".format(src.source_name, src.os)
if (source_and_os not in src_candidates_by_os
or src_candidates_by_os[source_and_os].version
< src.version):
src_candidates_by_os[source_and_os] = src
src_candidates = [
src_candidates_by_os[x] for x in src_candidates_by_os
]
result = []
seen = {}
to_release_object = self.get_release(to_release)
# For each package found above, find source and binaries in
# from_release and copy them over if they are not in to_release
for src in src_candidates:
source_and_os = "{0}:{1}".format(src.source_name, src.os)
if source_and_os not in seen:
seen[source_and_os] = True
for package in from_release.get_packages(source=src):
skip = False
if exclude_package_names is not None:
for exclude in exclude_package_names:
if re.match(exclude, package.name) is not None:
skip = True
break
if (not skip) and to_release_object.is_newer(package):
if not dryrun:
to_release_object.add_package(
package, update_metadata=False)
result.append(package)
if not dryrun:
to_release_object.update_metadata()
return result
# vim: filetype=python:<|fim▁end|> | return []
else:
return package_candidates
else: |
<|file_name|>levenshtein.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2011 Andrei Mackenzie
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
// Compute the edit distance between the two given strings
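// Quick sanity check of the algorithm below (illustrative):
// getEditDistance("kitten", "sitting") === 3 (two substitutions, one insertion)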
exports.getEditDistance = function(a, b){<|fim▁hole|> var matrix = [];
// increment along the first column of each row
var i;
for(i = 0; i <= b.length; i++){
matrix[i] = [i];
}
// increment each column in the first row
var j;
for(j = 0; j <= a.length; j++){
matrix[0][j] = j;
}
// Fill in the rest of the matrix
for(i = 1; i <= b.length; i++){
for(j = 1; j <= a.length; j++){
if(b.charAt(i-1) == a.charAt(j-1)){
matrix[i][j] = matrix[i-1][j-1];
} else {
matrix[i][j] = Math.min(matrix[i-1][j-1] + 1, // substitution
Math.min(matrix[i][j-1] + 1, // insertion
matrix[i-1][j] + 1)); // deletion
}
}
}
return matrix[b.length][a.length];
};<|fim▁end|> | if(a.length == 0) return b.length;
if(b.length == 0) return a.length;
|
<|file_name|>exercise.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from __future__ import unicode_literals
import os
def wait():
raw_input('\nPress Enter to continue...\n\n')
os.system(['clear', 'cls'][os.name == 'nt'])
# Create a class to handle items in a wallet
class BaseWalletHandler(object):
def __init__(self):
self.items = {
'Driver\'s License': False,
'Credit Card': False,
'Cash': False,
'Change': False,
'Insurance Card': False,
'ICE Info': False,
'Pictures': False,
}
def add_item(self, item):
if item in self.items.keys():
self.items[item] = True
def remove_item(self, item):
if item in self.items.keys():
self.items[item] = False
def show_items(self):
for key, value in self.items.items():
if value is True:
print key
# Could this class be refactored further to reduce the remaining duplication?
class WalletHandler(BaseWalletHandler):
def __init__(self):
super(WalletHandler, self).__init__()
def add_item(self, item):
super(WalletHandler, self).add_item(item)
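# Unlike the base handler, items not already in the wallet are added.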
if item not in self.items.keys():
self.items[item] = True
def exercise():
wallet_handler = BaseWalletHandler()<|fim▁hole|> wallet_handler.add_item('ICE Info')
wallet_handler.add_item('Credit Card')
wallet_handler.add_item('Business Card')
wallet_handler.show_items()
wait()
wallet_handler = WalletHandler()
wallet_handler.add_item('Driver\'s License')
wallet_handler.add_item('ICE Info')
wallet_handler.add_item('Credit Card')
wallet_handler.add_item('Business Card')
wallet_handler.show_items()
wait()
if __name__=='__main__':
exercise()<|fim▁end|> | wallet_handler.add_item('Driver\'s License') |
<|file_name|>DashboardCtrl.js<|end_file_name|><|fim▁begin|>define(['modules/dashboard/module'], function (module) {
"use strict";
module.registerController('DashboardCtrl', ['$scope', '$log', '$moment', 'Socket', 'toastr', 'Device', function ($scope, $log, $moment, Socket, toastr, Device) {
$(".view").css("min-height", $(window).height() - $('.header').height() - 100);
$('#datatable-measures').DataTable({columns: [{ "targets": 'measure.value', "title": "Value mg/dL", "data": 'value', "type": "number"},
{ "targets": 'measure.date', "title": "Date", "data": 'date', "type": "date", "render": function ( data, type, full, meta ) {
if (data != undefined && data != null)
return $moment(data).format('DD/MM/YYYY HH:mm');
else
return null;
}}],
fnRowCallback: function( nRow, aData, iDisplayIndex, iDisplayIndexFull ) {
if ( aData.value < "60" ) {
$('td', nRow).css('background-color', '#E91632');
}
else if ( aData.value > "120" ) {
$('td', nRow).css('background-color', '#E91632');
}
}});
// set textbox filter style
$('.dataTables_filter input').attr('type', 'text');
// get push event: measure
Socket.on('events', function(ptname) {
getMeasures();
})
$scope.$parent.onDeviceChange = function(device) {
// inject the datasource to datatable
$('table').dataTable().fnClearTable();
$('table').dataTable().fnAddData(device.measures);
//if ($scope.device.measures.length > 0) {
// var oTT = $('table').dataTable().fnGetInstance( 'datatable-measures' );
//oTT.fnSelect($('table tbody tr')[0]);
//}
}
function getMeasures() {<|fim▁hole|> $('table').dataTable().fnClearTable();
if (value !== undefined) {
var devices = JSON.parse(angular.toJson(value))
// set head dashboard
$scope.$parent.devices = devices;
if (devices.length > 0) {
$scope.$parent.device = devices[0];
// inject the datasource to datatable
$('table').dataTable().fnAddData(devices[0].measures);
//if ($scope.device.measures.length > 0) {
// var oTT = $('table').dataTable().fnGetInstance( 'datatable-measures' );
//oTT.fnSelect($('table tbody tr')[0]);
//}
}
}
}, function(httpResponse) {
var error = httpResponse.data.error;
console.log('Error getting measures - ' + error.status + ": " + error.message);
});
}
getMeasures();
}])
});<|fim▁end|> | Device.getMeasures()
.$promise
.then(function(value, responseHeaders) { |
<|file_name|>0239_auto_20211110_1921.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2021-11-10 19:21
from __future__ import unicode_literals
from django.db import migrations, models
<|fim▁hole|>
dependencies = [
('osf', '0238_abstractprovider_allow_updates'),
]
operations = [
migrations.AddIndex(
model_name='schemaresponse',
index=models.Index(fields=['object_id', 'content_type'], name='osf_schemar_object__8cc95e_idx'),
),
]<|fim▁end|> |
class Migration(migrations.Migration): |
<|file_name|>realty_bundle_gmap.js<|end_file_name|><|fim▁begin|>var map;
$(document).ready(function(){
var a=document.getElementById('map');
map = new GMaps({el: a,
//el: '#map',
lat: 55.763585,
lng: 37.560883,
zoom: 12,
mapTypeId: google.maps.MapTypeId.ROADMAP,
zoomControl : true,
zoomControlOpt: {
style : 'SMALL',
position: 'TOP_LEFT'
}
});
var logo=$('#logo').attr('src');
//alert(logo);
var icon = {
url: '/bundles/realty/map_icon.png', // url
scaledSize: new google.maps.Size(40, 40), // scaled size
origin: new google.maps.Point(0,0), // origin
anchor: new google.maps.Point(0, 0) // anchor
};
//var icon='/bundles/realty/map_icon.png';<|fim▁hole|> var PagesAllGoogleMap_path=$('#PagesAllGoogleMap_path').val();
for (var i =0; i<markers.length;i++)
{
if (markers[i]['city'] && markers[i]['street'] ) {
var address=markers[i]['city']+', '+markers[i]['street']+', '+markers[i]['house'];
var image='';
if (markers[i]['image_path']) {
image='<img src="/'+markers[i]['image_path']+'">';
}
var price='';
if (markers[i]['price']) {
price='<p>Price: '+markers[i]['price']+' USD</p>';
}
var totalArea='';
if (markers[i]['totalArea']) {
totalArea='<p>Total Area: '+markers[i]['totalArea']+'m2</p>';
}
var floor='';
if (markers[i]['totalArea']) {
floor='<p>Floor: '+markers[i]['numberOfFloors']+'/'+markers[i]['floor']+'</p>';
}
var contentHtml='<div class="bubblewrap">' +
'<a style="display:block;text-decoration:none" href="'+PagesAllGoogleMap_path+markers[i]['path']+'">' +
'<div class="mapContainer">' +
'<div class="mapPhoto">' + image +
'</div>' +
'<div class="mapDataC">' +
'<p><i class="fa fa-map-marker" aria-hidden="true"></i>'+address+'</p>' +
'<p>'+totalArea+'</p>' +
'<p>'+floor+'</p>' +
'<p>'+price+'</p>' +
'</div>' +
'<div class="view_div"><i class="info_window_arrow fa fa-5x fa-angle-right"></i></div>' +
'</div>' +
'</a>' +
'</div>';
// ************************
// grnertae makers content theme hlml css
// ************************
mapMake(address,icon,contentHtml);
}
}
function mapMake(address, icon, contentHtml )
{
GMaps.geocode({
address: address,
callback: function(results, status, html1) {
if (status == 'OK') {
var latlng = results[0].geometry.location;
map.setCenter(latlng.lat(), latlng.lng());
map.addMarker({
lat: latlng.lat(),
lng: latlng.lng(),
// title: 'Lima',
icon: icon,
infoWindow: {
content: contentHtml
}
});
}
}
});
}
});<|fim▁end|> |
var markers=$('#PagesAllGoogleMap').val();
markers= JSON.parse(markers);
|
<|file_name|>SimpleErrorReporter.java<|end_file_name|><|fim▁begin|>/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino;
import java.util.ArrayList;
import java.util.List;
/**
* A simple {@link ErrorReporter} that collects warnings and errors and makes
* them accessible via {@link #errors()} and {@link #warnings()}.
*
*
*/
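// Typical use (illustrative): hand an instance to the parser, then inspect
// errors()/warnings() afterwards; a null return means none were reported.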
public class SimpleErrorReporter implements ErrorReporter {
private List<String> warnings = null;
private List<String> errors = null;
public void warning(String message, String sourceName, int line,
String lineSource, int lineOffset)
{
if (warnings == null) {
warnings = new ArrayList<String>();
}
warnings.add(formatDetailedMessage(
message, sourceName, line, lineSource, lineOffset));
}
public void error(String message, String sourceName, int line,
String lineSource, int lineOffset)
{
if (errors == null) {
errors = new ArrayList<String>();
}
errors.add(formatDetailedMessage(<|fim▁hole|>
public EvaluatorException runtimeError(
String message, String sourceName, int line, String lineSource,
int lineOffset)
{
return new EvaluatorException(
message, sourceName, line, lineSource, lineOffset);
}
/**
* Returns the list of errors, or {@code null} if there were none.
*/
public List<String> errors()
{
return errors;
}
/**
* Returns the list of warnings, or {@code null} if there were none.
*/
public List<String> warnings()
{
return warnings;
}
private String formatDetailedMessage(
String message, String sourceName, int line, String lineSource,
int lineOffset)
{
RhinoException e = new RhinoException(message);
if (sourceName != null) {
e.initSourceName(sourceName);
}
if (lineSource != null) {
e.initLineSource(lineSource);
}
if (line > 0) {
e.initLineNumber(line);
}
if (lineOffset > 0) {
e.initColumnNumber(lineOffset);
}
return e.getMessage();
}
}<|fim▁end|> | message, sourceName, line, lineSource, lineOffset));
} |
<|file_name|>public_api.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
export * from './slide-toggle-module';
export * from './slide-toggle';
export * from './mat-exports';<|fim▁end|> | |
<|file_name|>server_state.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import imp
import os
import threading
from ycmd.utils import ForceSemanticCompletion
from ycmd.completers.general.general_completer_store import (
GeneralCompleterStore )
from ycmd.completers.completer_utils import PathToFiletypeCompleterPluginLoader
class ServerState( object ):
def __init__( self, user_options ):
self._user_options = user_options
self._filetype_completers = {}
self._filetype_completers_lock = threading.Lock()
self._gencomp = GeneralCompleterStore( self._user_options )
@property
def user_options( self ):
return self._user_options
def Shutdown( self ):
with self._filetype_completers_lock:
for completer in self._filetype_completers.itervalues():
if completer:
completer.Shutdown()
self._gencomp.Shutdown()
def _GetFiletypeCompleterForFiletype( self, filetype ):
with self._filetype_completers_lock:
try:
return self._filetype_completers[ filetype ]
except KeyError:
pass
module_path = PathToFiletypeCompleterPluginLoader( filetype )
completer = None
supported_filetypes = [ filetype ]
if os.path.exists( module_path ):
module = imp.load_source( filetype, module_path )
completer = module.GetCompleter( self._user_options )
if completer:
supported_filetypes.extend( completer.SupportedFiletypes() )
for supported_filetype in supported_filetypes:
self._filetype_completers[ supported_filetype ] = completer
return completer
def GetFiletypeCompleter( self, current_filetypes ):
completers = [ self._GetFiletypeCompleterForFiletype( filetype )
for filetype in current_filetypes ]
for completer in completers:
if completer:
return completer
raise ValueError( 'No semantic completer exists for filetypes: {0}'.format(
current_filetypes ) )
def FiletypeCompletionAvailable( self, filetypes ):<|fim▁hole|> except Exception:
return False
def FiletypeCompletionUsable( self, filetypes ):
return ( self.CurrentFiletypeCompletionEnabled( filetypes ) and
self.FiletypeCompletionAvailable( filetypes ) )
def ShouldUseGeneralCompleter( self, request_data ):
return self._gencomp.ShouldUseNow( request_data )
def ShouldUseFiletypeCompleter( self, request_data ):
"""
Determines whether or not the semantic completer should be called, and
returns an indication of the reason why. Specifically, returns a tuple:
( should_use_completer_now, was_semantic_completion_forced ), where:
- should_use_completer_now: if True, the semantic engine should be used
- was_semantic_completion_forced: if True, the user requested "forced"
semantic completion
was_semantic_completion_forced is always False if should_use_completer_now
is False
"""
filetypes = request_data[ 'filetypes' ]
if self.FiletypeCompletionUsable( filetypes ):
if ForceSemanticCompletion( request_data ):
# use semantic, and it was forced
return ( True, True )
else:
# was not forced. check the conditions for triggering
return ( self.GetFiletypeCompleter( filetypes ).ShouldUseNow(
request_data ), False )
# don't use semantic, ignore whether or not the user requested forced
# completion
return ( False, False )
def GetGeneralCompleter( self ):
return self._gencomp
def CurrentFiletypeCompletionEnabled( self, current_filetypes ):
filetype_to_disable = self._user_options[
'filetype_specific_completion_to_disable' ]
if '*' in filetype_to_disable:
return False
else:
return not all([ x in filetype_to_disable for x in current_filetypes ])<|fim▁end|> | try:
self.GetFiletypeCompleter( filetypes )
return True |
<|file_name|>IconPulser.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.volume;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.content.Context;
import android.view.View;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
public class IconPulser {
private static final float PULSE_SCALE = 1.1f;
private final Interpolator mFastOutSlowInInterpolator;
public IconPulser(Context context) {
mFastOutSlowInInterpolator = AnimationUtils.loadInterpolator(context,
android.R.interpolator.fast_out_slow_in);
}
public void start(final View target) {
if (target == null || target.getScaleX() != 1) return; // n/a, or already running
target.animate().cancel();
target.animate().scaleX(PULSE_SCALE).scaleY(PULSE_SCALE)
.setInterpolator(mFastOutSlowInInterpolator)
.setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
target.animate().scaleX(1).scaleY(1).setListener(null);
}
});
}
}<|fim▁end|> | * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>GetCoinbaseAddress.java<|end_file_name|><|fim▁begin|>/*
* This file is part of RskJ
* Copyright (C) 2019 RSK Labs Ltd.
* (derived from ethereumJ library, Copyright (c) 2016 <ether.camp>)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License<|fim▁hole|>package co.rsk.pcc.blockheader;
import co.rsk.pcc.ExecutionEnvironment;
import org.ethereum.core.Block;
import org.ethereum.core.CallTransaction;
/**
* This implements the "getCoinbaseAddress" method
* that belongs to the BlockHeaderContract native contract.
*
* @author Diego Masini
*/
public class GetCoinbaseAddress extends BlockHeaderContractMethod {
private final CallTransaction.Function function = CallTransaction.Function.fromSignature(
"getCoinbaseAddress",
new String[]{"int256"},
new String[]{"bytes"}
);
public GetCoinbaseAddress(ExecutionEnvironment executionEnvironment, BlockAccessor blockAccessor) {
super(executionEnvironment, blockAccessor);
}
@Override
public CallTransaction.Function getFunction() {
return function;
}
@Override
protected Object internalExecute(Block block, Object[] arguments) {
return block.getCoinbase().getBytes();
}
}<|fim▁end|> | * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
|
<|file_name|>0003_snoopedelection_extra.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-14 17:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("election_snooper", "0002_auto_20170314_1754")]
operations = [
migrations.AddField(
model_name="snoopedelection",
name="extra",
field=models.TextField(blank=True),
)
]<|fim▁end|> | |
<|file_name|>declrender.ts<|end_file_name|><|fim▁begin|>///<reference path='refs.ts'/>
module TDev
{
export class DeclEntry
{
public icon = "svg:hammer,white,clip=50";
public iconArtId: string;
public color = "blue";
public description = "";
public classAdd = "";
constructor(public name:string) {
}
private getName() { return this.name; }
private getDescription() { return this.description; }
private nodeType() { return "declEntry"; }
public mkBox():HTMLElement
{
var r = DeclRender.mkBoxEx(this, this.nodeType());
r.className += this.classAdd;
return r;
}
public getIconArtId() { return this.iconArtId; }
public makeIntoAddButton()
{
this.icon = "svg:circlePlus,black";
this.color = "transparent";
this.classAdd += " navRound";
}
static mkSeeMore(lbl:string)
{
var d = new DeclEntry("see more options");
d.description = lbl
return d.mkBox()
}
}
export module DeclRender
{
//var libsIcon = ScriptIcon("Book");
//var libsBrush = new SolidColorBrush(Colors.Magenta); // TODO find good color
var artColor = "#ff0038";
var dataColor = "#ff7518";
var actionColor = "#E72A59";
var recordColor = "#800080";
var userDefinedColor = "#1B91E0";
export var declColor:any =
{
globalDef: (d:AST.GlobalDef) => {
var c = DeclRender.propColor(d);
if (c) return c;
return d.isResource ? artColor : dataColor;
},
recordDef: () => recordColor,
action: function (a: AST.Action) {
if (a.isEvent()) return "#007fff";
if (a.isPage()) return "#00008B";
if (a.isActionTypeDef()) return recordColor;
if (Util.cloudRun) {
a.canBeOffloadedCache = AST.CanBeOffloadedState.Unknown;
// recompute isOffloaded since the action or those it calls may change
a.isOffloaded = /^cloud/.test(a.getName()) || (a.isOffloaded && a.canBeOffloaded());
if (a.isOffloaded) return "#800000";
}
return actionColor;
},
libraryRef: (l:AST.LibraryRef) =>
l.resolved ? l.resolved.htmlColor() : "#48A300",
/*
globalDef: (d:AST.GlobalDef) => d.isResource ? "#A4500F" : "#FD882E",
tableDef: () => "#E72A59",
action: (a:AST.Action) => a.isEvent() ? "#41900D" : "#70DE29",
*/
app: (a:AST.App) => a.htmlColor(),
localDef: () => "#E72A59",
kind: (k:Kind) =>
k.isAction ? "#007fff" : k.isUserDefined() ? recordColor : k.getParameterCount() > 0 ? "#c22" : "#40B619",
singletonDef: function(s:AST.SingletonDef) {
if (s.getKind() instanceof ThingSetKind)
return userDefinedColor;
return "#40B619";
/*
switch (s.getName()) {
case "data": return dataColor;
case "art": return artColor;
case "code": return actionColor;
default: return "#40B619";
}
*/
},
recordField: () => "#0d2",
property: function(p:IProperty) {
var c = DeclRender.propColor(p);
if (c)
return c;
else if (p.forwardsToStmt() instanceof AST.RecordField)
return "#0d2";
else if (p.parentKind.isData)
return dataColor;
else if (p instanceof MultiplexProperty)
return declColor.kind((<MultiplexProperty> p).forKind);
else
return declColor.kind(p.getResult().getKind());
/*
if (p.getResult().getKind() == api.core.Nothing)
return actionColor;
else {
return artColor;
var pp = p.getParameters();
var len = pp.length;
if (!pp[0].getKind().isData) len--;
if (len == 0) return artColor;
else if (len == 1) return dataColor;
else return "#800080";
}
*/
},
declEntry: (d:DeclEntry) => d.color,
codeLocation: () => "#1B91E0",
codeLocationCurr: () => "#E31B78",
codeLocationLib: () => "#48A300",
};
export function propColor(p:IProperty)
{
if (p && p.getResult().getKind() == api.core.Color) {
var m = /#([a-fA-F0-9]+)/.exec(p.getDescription());
if (m && m[1].length >= 6) {
return "#" + m[1].slice(-6);
}
}
return null;
}<|fim▁hole|> }
function appCloudColor(app:AST.App, icon:string)
{
if (!app) return icon
if (!icon) icon = ArtUtil.artUrl(app.iconArtId) || app.iconPath()
if (app.isCloud) // TODO: wrong color
icon = icon.replace(/,white/, ",cyan")
return icon
}
var declIcon:any =
{
globalDef: (d: AST.GlobalDef) =>
colorByPersistence(d.getKind().icon() || "svg:Document,white", d.getRecordPersistence()),
recordDef: (r: AST.RecordDef) =>
colorByPersistence(AST.RecordDef.GetIcon(r.recordType), r.getRecordPersistence()),
recordField: (r: AST.RecordField) =>
declIcon["kind"](r.dataKind),
action: (a:AST.Action) =>
// a.isMainAction() ? "svg:actionMain,white" :
a.isActionTypeDef() ? "svg:Bolt,white" :
a.isPage() ? "svg:Book,white" :
a.isEvent() ? "svg:actionEvent,white" :
a.isPrivate ? "svg:Lock,white" :
a.isOffline ? "svg:SignalAlt,white" :
a.parent && a.parent.isCloud ? "svg:Signal,white" :
"svg:emptyplay,white",
app: (a:AST.App) => appCloudColor(a, null),
localDef: (d:AST.LocalDef) =>
d.getKind().icon() || "svg:Document,white",
singletonDef: () => "svg:touchDevelop,white",
property: (p:IProperty) =>
p.getResult().getKind().icon() || "svg:touchDevelop,white",
declEntry: (d:DeclEntry) => d.icon,
kind: (k:Kind) => k.icon() || "svg:Document,white",
codeLocation: () => "svg:actionLocation,white",
codeLocationCurr: () => "svg:actionLocation,white",
codeLocationLib: () => "svg:actionLocation,white",
libraryRef: (l:AST.LibraryRef) =>
appCloudColor(l.resolved,
l.resolved && l.resolved.icon ? l.resolved.iconPath() : "svg:recycleLib,white"),
};
export function mkPropBox(p:IProperty)
{
var f = p.forwardsTo();
if (f != null)
return mkBox(f);
else
return mkBoxEx(p, "property");
}
export function mkKindBox(p:Kind)
{
return mkBoxEx(p, "kind");
}
export function mkBox(decl: AST.Decl) { return mkBoxEx(decl, decl.nodeType()); }
export function mkNameSpaceDecl(decl: any) {
var ns = null;
if (decl.getNamespace) {
ns = span("navSig symbol", decl.getNamespace());
}
var name = decl.getName();
return [ns,name];
}
function iconFromDecl(decl: AST.Decl, tp: string) {
var img;
var iconArtId = decl.getIconArtId ? decl.getIconArtId() : undefined;
if (iconArtId) img = ArtUtil.artImg(iconArtId, true);
else {
var iconPath = declIcon[tp](decl);
img = !iconPath ? <any> text("") : HTML.mkImg(iconPath);
}
var icon = div("navImg", img);
icon.style.backgroundColor = declColor[tp](decl);
return icon;
}
var mdCmt = new MdComments();
export function mkBoxEx(decl:any, tp:string):HTMLElement
{
var icon = iconFromDecl(decl, tp);
var innerElt = div("navItemInner");
var elt= HTML.mkButtonElt("navItem", innerElt);
var sig = null;
var ns = null;
var desc = decl.getBoxInfo ? Util.htmlEscape(decl.getBoxInfo()) : mdCmt.formatInline(decl.getDescription());
var name = decl.getName();
var sigText = decl.getSignature && decl.getSignature();
if (sigText) {
var limit = 18;
if (decl instanceof Kind) limit = 40;
if ((name + sigText).length > limit && sigText != "()") {
if (desc)
desc = Util.htmlEscape(sigText) + " :: " + desc;
else
desc = Util.htmlEscape(sigText);
} else {
sig = span("navSig", decl.getSignature());
}
}
if (decl.getNamespace) {
ns = span("navSig symbol", decl.getNamespace());
}
var descDiv = div("navDescription md-inline")
if (decl instanceof AST.Action && !decl.isAtomic)
desc = "<span class='actionAwait'>" + SVG.getIconSVGCore("clock2,#666,clip=60") + "</span>" + desc;
Browser.setInnerHTML(descDiv, desc)
var suff = null
if (decl instanceof Kind) {
if ((<Kind>decl).isImmutable())
suff = div("navDiamond", SVG.getIconSVG("diamond,#00f,clip=60"))
}
var nameDiv = div("navName", ns, name, sig);
innerElt.setChildren([icon, div("navContent", [nameDiv, descDiv]), suff]);
if (decl.debuggingData && decl.debuggingData.critical && decl.debuggingData.max) {
var scorePartial = decl.debuggingData.critical / decl.debuggingData.max.critical;
var score = Math.floor(scorePartial * 27); // there are 28 colors, first of them is white
var color: string = AST.ExprHolder.heatmapColors[score];
innerElt.style.backgroundColor = color;
}
(<any> elt).theDesc = descDiv;
(<any> elt).theName = nameDiv;
(<any> elt).theNode = decl;
return elt;
}
export function mkKindList(ctx:KindContext, curr:Kind, selected:(k:Kind)=>void)
{
var kinds = Script.getKinds().filter((k:Kind) =>
k.isData && k.hasContext(ctx) && Script.canUseCapability(k.generalCapabilities) &&
k.getParameterCount() <= 1);
function cmp(a:Kind, b:Kind) {
var d = b.listPriority() - a.listPriority();
if (d) return d;
else return a.toString().localeCompare(b.toString());
}
kinds.sort(cmp);
return kinds.map(function (k:Kind) {
var kk = DeclRender.mkKindBox(k);
if (k == curr)
kk.setFlag("selected", true);
Util.clickHandler(kk, function() { selected(k) });
return kk;
});
}
}
}<|fim▁end|> |
export function colorByPersistence(icon: string, pers: AST.RecordPersistence): string {
return AST.RecordDef.colorByPersistence(icon, pers); |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import models
import django.template.defaultfilters
from django.db.models import Max
from django.utils.functional import cached_property
# Create your models here.
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey,GenericRelation
#from django.contrib.contenttypes import generic
from django.dispatch import receiver
from django.db.models.signals import post_save,post_delete,pre_save,pre_delete
from django.db.models.signals import m2m_changed
import django.dispatch
position_changed = django.dispatch.Signal(providing_args=["instance"])
valid_changed = django.dispatch.Signal(providing_args=["instance"])
#from santaclara_base.models import PositionAbstract
import re
import heapq
def custom_model_list(model_list):
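    # Group the admin model list into fixed thematic sections, keyed on each
    # entry's "model_label"; anything that matches no section falls back to
    # the "Book" section.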
sections=["Language",
"Place",
"Time span",
"Person",
"Category",
"Author",
"Publisher",
"Book",
"Publication",
"Migr",
"Repository",]
ret={}
for sec in sections:
ret[sec]=[]
for model_dict in model_list:
if model_dict["model_label"] in ["repositorycachebook","repositorycacheauthor","repositoryfailedisbn",]:
ret["Repository"].append(model_dict)
continue
if model_dict["model_label"] in [ "timepoint","timespan","datemodifier" ]:
ret["Time span"].append(model_dict)
continue
if model_dict["model_label"] in [ "language","languagefamily","languagefamilyrelation",
"languagefamilyfamilyrelation","languagevarietytype","languagevariety" ]:
ret["Language"].append(model_dict)
continue
if model_dict["model_label"] in [ "placetype","place","alternateplacename","placerelation" ]:
ret["Place"].append(model_dict)
continue
if model_dict["model_label"] in [ "article","articleauthorrelation","issuetype",
"issue","publication","volumetype","volume" ]:
ret["Publication"].append(model_dict)
continue
if model_dict["model_label"] in [ "nameformat","nametype","nameformatcollection","personcache",
"person","personnamerelation" ]:
ret["Person"].append(model_dict)
continue
if model_dict["model_label"] in [ "categorytreenode","category","categoryrelation",
"categorytimespanrelation", "categoryplacerelation",
"categorypersonrelation",
"categorylanguagerelation" ]:
ret["Category"].append(model_dict)
continue
if model_dict["model_label"] in [ "author","authorrole","authorrelation" ]:
ret["Author"].append(model_dict)
continue
if model_dict["model_label"] in [ "migrauthor","migrpublisherriviste" ]:
ret["Migr"].append(model_dict)
continue
if model_dict["model_label"] in [ "publisherstate","publisheraddress","publisherisbn","publisher",
"publisheraddresspublisherrelation" ]:
ret["Publisher"].append(model_dict)
continue
ret["Book"].append(model_dict)
xret=[]
for sec in sections:
xret.append( (sec,ret[sec]))
return xret
class PositionAbstract(models.Model):
""" Classe astratta per gestire oggetti posizionabili all'interno di un elenco.
Definisce il campo *pos* (posizione) come intero positivo.
Emette il segnale :any:`santaclara_base.signals.position_changed`
quando la posizione viene modificata.
Un modello che estende la classe PositionAbstract e ridefinisce
__init__() o save() deve ricordarsi di richiamare rispettivamente
:any:`PositionAbstract.my_action_post_init
<santaclara_base.models.PositionAbstract.my_action_post_init>` e
:any:`PositionAbstract.my_action_post_save
<santaclara_base.models.PositionAbstract.my_action_post_save>`.
Un modello che estende la classe PositionAbstract con eredità
multipla e in modo che save() e __init__() siano ereditati da
un'altra classe (quindi con PositionAbstract non primo modello tra
i padri), deve ridefinirli in modo o da richiamare
PositionAbstract.save() e PositionAbstract.__init__() oppure da
utilizzare esplicitamente
:any:`PositionAbstract.my_action_post_init
<santaclara_base.models.PositionAbstract.my_action_post_init>` e
:any:`PositionAbstract.my_action_post_save
<santaclara_base.models.PositionAbstract.my_action_post_save>`.
"""
    #: Position.
pos = models.PositiveIntegerField()
class Meta:
abstract = True
def __init__(self,*args,**kwargs):
super(PositionAbstract, self).__init__(*args, **kwargs)
self.my_action_post_init(*args,**kwargs)
def save(self,*args,**kwargs):
super(PositionAbstract,self).save(*args,**kwargs)
self.my_action_post_save(*args,**kwargs)
def my_action_post_save(self,*args,**kwargs):
""" Se un modello che estende PositionAbstract sovrascrive
save() e non richiama esplicitamente PositionAbstract.save(),
oppure se in caso di eredità multipla il save() del modello
non è PositionAbstract.save(), nel nuovo save() dev'essere
richiamata questa funzione, passandole gli stessi parametri di
save(). """
if self.__original_pos!=self.pos:
position_changed.send(self.__class__,instance=self)
self.__original_pos = self.pos
def my_action_post_init(self,*args,**kwargs):
""" Se un modello che estende PositionAbstract sovrascrive
__init__() e non richiama esplicitamente PositionAbstract.__init__(),
oppure se in caso di eredità multipla il __init__() del modello
non è PositionAbstract.__init__(), nel nuovo __init__() dev'essere
richiamata questa funzione, passandole gli stessi parametri di
__init__(). """
self.__original_pos = self.pos
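# A minimal usage sketch (hypothetical model, illustration only): a subclass
# that keeps the inherited save()/__init__() gets position_changed for free
# whenever pos changes:
#
#   class Slide(PositionAbstract):
#       title = models.CharField(max_length=256)
#
#   @receiver(position_changed, sender=Slide)
#   def slide_moved(sender, instance, **kwargs):
#       pass  # e.g. renumber siblings or invalidate cached orderings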
class LabeledAbstract(models.Model):
label = models.SlugField(unique=True)
description = models.CharField(max_length=1024)
class Meta:
abstract = True
def __str__(self):
return str(self.label)
def clean(self,*args,**kwargs):
self.label = self.label.lower()
super(LabeledAbstract, self).clean(*args, **kwargs)
### time span
class DateModifier(PositionAbstract):
name = models.CharField(max_length=1024)
reverse = models.BooleanField(default=False)
class Meta:
ordering = [ 'pos' ]
def __str__(self):
if self.id==0: return ""
if not self.name: return "-"
return str(self.name)
def save(self,*args,**kwargs):
super(DateModifier, self).save(*args, **kwargs)
for obj in self.timepoint_set.all():
obj.save()
class TimePoint(models.Model):
date = models.IntegerField()
modifier = models.ForeignKey(DateModifier,blank=True,default=0,on_delete=models.PROTECT)
class Meta:
ordering = [ 'modifier','date' ]
unique_together= [ 'modifier','date' ]
def __str__(self):
U=str(abs(self.date))
if self.modifier.id!=0:
U+=" "+str(self.modifier)
return U
def save(self,*args,**kwargs):
if not self.modifier:
self.modifier=DateModifier.objects.get(id=0)
if self.modifier.reverse:
self.date=-abs(self.date)
else:
self.date=abs(self.date)
super(TimePoint, self).save(*args, **kwargs)
def begins(self):
return "; ".join([str(x) for x in self.begin_set.all()])
def ends(self):
return "; ".join([str(x) for x in self.end_set.all()])
def time_spans(self):
L=[str(x) for x in self.begin_set.all()]
L+=[str(x) for x in self.end_set.all()]
L=list(set(L))
return "; ".join(L)
class TimeSpan(models.Model):
begin = models.ForeignKey(TimePoint,related_name="begin_set",on_delete=models.PROTECT)
end = models.ForeignKey(TimePoint,related_name="end_set",on_delete=models.PROTECT)
name = models.CharField(max_length=4096,blank=True)
def __str__(self):
if self.name:
return str(self.name)
return str(self.begin)+"-"+str(self.end)
class Meta:
ordering = [ 'begin','end' ]
def categories(self):
return "; ".join([str(x.category) for x in self.categorytimespanrelation_set.all()])
### language
class Language(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
def families(self):
return "; ".join([str(x.family) for x in self.languagefamilyrelation_set.all()])
def varieties(self):
return "; ".join([str(x) for x in self.languagevariety_set.all()])
class LanguageFamily(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
def parents(self):
return "; ".join([str(x.parent) for x in self.parent_set.all()])
def children(self):
return "; ".join([str(x.child) for x in self.child_set.all()])
def languages(self):
return "; ".join([str(x.language) for x in self.languagefamilyrelation_set.all()])
class LanguageFamilyRelation(models.Model):
language = models.ForeignKey(Language,on_delete=models.PROTECT)
family = models.ForeignKey(LanguageFamily,on_delete=models.PROTECT)
def __str__(self):
return str(self.family)+"/"+str(self.language)
class LanguageFamilyFamilyRelation(models.Model):
parent = models.ForeignKey(LanguageFamily,related_name="child_set",on_delete=models.PROTECT)
child = models.ForeignKey(LanguageFamily,related_name="parent_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.parent)+"/"+str(self.child)
class Meta:
ordering = ["parent","child"]
class LanguageVarietyType(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
class LanguageVariety(models.Model):
name = models.CharField(max_length=4096,blank=True)
language = models.ForeignKey(Language,on_delete=models.PROTECT)
type = models.ForeignKey(LanguageVarietyType,default=1,on_delete=models.PROTECT)
def __str__(self):
if self.type.id==1:
return str(self.language)
if not self.name:
return str(self.language)
return str(self.language)+" ("+str(self.name)+")"
### place
class PlaceType(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
class Place(models.Model):
name = models.CharField(max_length=4096,unique=True)
type = models.ForeignKey(PlaceType,on_delete=models.PROTECT)
def __str__(self):
return self.name
def alternate_names(self):
return "; ".join([str(x.name) for x in self.alternateplacename_set.all()])
def areas(self):
return "; ".join([str(x.area) for x in self.area_set.all()])
def places(self):
return "; ".join([str(x.place) for x in self.place_set.all()])
class Meta:
ordering = [ "name" ]
class AlternatePlaceName(models.Model):
place = models.ForeignKey(Place,on_delete=models.PROTECT)
name = models.CharField(max_length=4096)
note = models.CharField(max_length=65536,blank=True)
def __str__(self):
return self.name
class PlaceRelation(models.Model):
place = models.ForeignKey(Place,related_name="area_set",on_delete=models.PROTECT)
area = models.ForeignKey(Place,related_name="place_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.area)+"/"+str(self.place)
class Meta:
ordering = ["area","place"]
### person
class NameFormat(LabeledAbstract):
pattern = models.CharField(max_length=1024)
class Meta:
ordering = ["label"]
def save(self, *args, **kwargs):
super(NameFormat, self).save(*args, **kwargs)
for coll in self.long_format_set.all():
coll.save()
for coll in self.short_format_set.all():
coll.save()
for coll in self.ordering_format_set.all():
coll.save()
for coll in self.list_format_set.all():
coll.save()
class NameType(LabeledAbstract): pass
RE_NAME_SEP=re.compile("('| |-)")
VONS=["von","di","da","del","della","dell","dello","dei","degli","delle","de","d","la","lo",
"dal","dalla","dall","dallo","dai","dagli","dalle","al","ibn"]
ROMANS=["I","II","III","IV","V","VI","VII","VIII","IX","X",
"XI","XII","XIII","XIV","XV","XVI","XVII","XVIII","XIX","XX",
"XXI","XXII","XXIII","XXIV","XXV","XXVI","XXVII","XXVIII","XXIX","XXX",
"XXXI","XXXII","XXXIII","XXXIV","XXXV","XXXVI","XXXVII","XXXVIII","XXXIX","XL",
"XLI","XLII","XLIII","XLIV","XLV","XLVI","XLVII","XLVIII","XLIX","L"]
class NameFormatCollectionManager(models.Manager):
def get_preferred(self,num_fields):
preferred_list=self.all().filter(preferred=True)
for format_c in preferred_list:
fields=format_c.fields
if len(fields)==num_fields:
return format_c
format_max_num=-1
format_max=None
for format_c in self.all():
fields=format_c.fields
if len(fields)==num_fields:
return format_c
if len(fields)>format_max_num:
format_max_num=len(fields)
format_max=format_c
return format_max
def get_format_for_name(self,search):
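        # e.g. (illustrative): get_format_for_name("Massimo della Valle")
        # returns the preferred two-field collection and the name list
        # ["Massimo", "della Valle"]: lowercase particles listed in VONS are
        # folded into the name that follows them.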
if not search:
return self.get_preferred(0),[]
if search.lower().replace(".","") in [ "av","aavv" ]:
return self.get_preferred(0),[]
t=RE_NAME_SEP.split(search)
names=[]
t_vons=""
for n in range(0,len(t)):
if not t[n]: continue
if t[n] in [ " ","'" ]:
if t_vons:
t_vons+=t[n]
continue
if t[n]=="-":
if t_vons:
t_vons+="-"
else:
names[-1]+="-"
continue
if t[n].lower() not in VONS:
if names and names[-1].endswith("-"):
names[-1]+=t[n].capitalize()
else:
names.append(t_vons+t[n].capitalize())
t_vons=""
continue
t_vons+=t[n]
return self.get_preferred(len(names)),names
class NameFormatCollection(LabeledAbstract):
long_format = models.ForeignKey(NameFormat,related_name='long_format_set',on_delete=models.PROTECT)
short_format = models.ForeignKey(NameFormat,related_name='short_format_set',on_delete=models.PROTECT)
list_format = models.ForeignKey(NameFormat,related_name='list_format_set',on_delete=models.PROTECT)
ordering_format = models.ForeignKey(NameFormat,related_name='ordering_format_set',on_delete=models.PROTECT)
preferred = models.BooleanField(default=False)
objects = NameFormatCollectionManager()
def save(self, *args, **kwargs):
super(NameFormatCollection, self).save(*args, **kwargs)
for person in self.person_set.all():
person.update_cache()
@cached_property
def fields(self):
L=["name","surname"]
long_name=str(self.long_format.pattern)
short_name=str(self.short_format.pattern)
list_name=str(self.list_format.pattern)
ordering_name=str(self.ordering_format.pattern)
for s in "VALURNIC":
long_name=long_name.replace("{{"+s+"|","{{")
short_name=short_name.replace("{{"+s+"|","{{")
list_name=list_name.replace("{{"+s+"|","{{")
ordering_name=ordering_name.replace("{{"+s+"|","{{")
names=[]
for f in [long_name,short_name,list_name,ordering_name]:
L=[x.replace("{{","").replace("}}","") for x in re.findall(r'{{.*?}}',f)]
for name in L:
if name in names: continue
names.append(name)
return names
    ### Format syntax
    # {{<name_type>}}: <name_type>
    # {{C|<name_type>}}: <name_type> (capitalized)
    # {{V|<name_type>}}: <name_type> (capitalized, except particles like von, de, etc.)
    # {{L|<name_type>}}: <name_type> (lowercased)
    # {{U|<name_type>}}: <name_type> (uppercased)
    # {{A|<name_type>}}: <name_type> as an integer, in Arabic numerals
    # {{R|<name_type>}}: <name_type> as an integer, in uppercase Roman numerals
    # {{N|<name_type>}}: <name_type> (lowercased, spaces replaced by _)
    # {{I|<name_type>}}: initials (Gian Uberto => G. U.)
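    #
    # Example (hypothetical patterns, assuming name types "name" and "surname"):
    #   "{{V|name}} {{V|surname}}"  -> "Gian Uberto de Rossi"
    #   "{{V|surname}}, {{I|name}}" -> "de Rossi, G. U."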
def apply_formats(self,names):
long_name=str(self.long_format.pattern)
short_name=str(self.short_format.pattern)
list_name=str(self.list_format.pattern)
ordering_name=str(self.ordering_format.pattern)
list_upper=str(self.list_format.pattern)
list_lower=str(self.list_format.pattern)
names_list=list(names.items())
if not names_list:
return long_name,short_name,list_name,ordering_name,"-","-"
for key,rel in names_list:
val_f=rel.formatted()
long_name=long_name.replace("{{"+key+"}}",val_f["norm"])
short_name=short_name.replace("{{"+key+"}}",val_f["norm"])
list_name=list_name.replace("{{"+key+"}}",val_f["norm"])
ordering_name=ordering_name.replace("{{"+key+"}}",val_f["norm"])
list_upper=list_upper.replace("{{"+key+"}}",val_f["norm_upper"])
list_lower=list_lower.replace("{{"+key+"}}",val_f["norm_lower"])
for k in "VALURNIC":
long_name=long_name.replace("{{"+k+"|"+key+"}}",val_f[k])
short_name=short_name.replace("{{"+k+"|"+key+"}}",val_f[k])
list_name=list_name.replace("{{"+k+"|"+key+"}}",val_f[k])
ordering_name=ordering_name.replace("{{"+k+"|"+key+"}}",val_f[k])
if k in "AR":
list_upper=list_upper.replace("{{"+k+"|"+key+"}}",val_f[k])
list_lower=list_lower.replace("{{"+k+"|"+key+"}}",val_f[k])
else:
list_upper=list_upper.replace("{{"+k+"|"+key+"}}",val_f["norm_upper"])
list_lower=list_lower.replace("{{"+k+"|"+key+"}}",val_f["norm_lower"])
return long_name,short_name,list_name,ordering_name,list_upper[0],list_lower[0]
class PersonCache(models.Model):
long_name = models.CharField(max_length=4096,default="-")
short_name = models.CharField(max_length=4096,default="-")
list_name = models.CharField(max_length=4096,default="-")
ordering_name = models.CharField(max_length=4096,default="-")
upper_initial = models.CharField(max_length=4,default="-")
lower_initial = models.CharField(max_length=4,default="-")
class Meta:
ordering = ["ordering_name"]
db_table = 'bibliography_personcache'
def __str__(self): return self.list_name
class PersonManager(models.Manager):
def search_names(self,names):
qset=self.all()
if len(names)==0: return qset
#D=[]
for name in names:
if name.endswith("."):
name=name[:-1]
qset=qset.filter(personnamerelation__value__istartswith=name)
elif len(name)==1:
qset=qset.filter(personnamerelation__value__istartswith=name)
else:
qset=qset.filter(personnamerelation__value__iexact=name)
# if qset.count()>0: return qset.select_related("cache")
# if len(names)==1: return qset.select_related("cache")
# if len(names)==2:
# newnames=[ " ".join(names) ]
# return self.search_names(newnames)
# L=len(names)
# for n in range(0,L-1):
# newnames=names[0:n] + [ " ".join(names[n:n+2])] + names[n+2:L]
# qset=self.search_names(newnames)
# if qset.count()>0: return qset.select_related("cache")
return qset.select_related("cache")
def filter_by_name(self,search):
search=search.replace(" , "," ")
search=search.replace(", "," ")
search=search.replace(" ,"," ")
search=search.replace(","," ")
if search.lower() in [ "--","","- -","-","aavv","aa.vv.","aa. vv."]:
format_c=NameFormatCollection.objects.get(label="aavv")
qset=self.all().filter(format_collection=format_c)
return qset
t_name=search.lower().split(" ")
return self.search_names(t_name)
def look_for(self,name_list):
old={}
new=[]
for name in name_list:
qset=self.filter_by_name(name)
if qset.count():
old[name]=(qset.first())
else:
new.append(name)
return old,new
def create_by_names(self,format_collection,**kwargs):
obj=self.create(format_collection=format_collection)
for key,val in list(kwargs.items()):
name_type,created=NameType.objects.get_or_create(label=key)
rel,created=PersonNameRelation.objects.get_or_create(person=obj,name_type=name_type,
defaults={"value": val})
if not created:
rel.value=val
rel.save()
return obj
class Person(models.Model):
format_collection = models.ForeignKey(NameFormatCollection,on_delete=models.PROTECT)
cache = models.OneToOneField(PersonCache,editable=False,null=True,on_delete=models.PROTECT)
names = models.ManyToManyField(NameType,through='PersonNameRelation',blank=True)
objects = PersonManager()
class Meta:
ordering = ["cache"]
db_table = 'bibliography_person'
def __str__(self):
return self.list_name()
def long_name(self): return str(self.cache.long_name)
def short_name(self): return str(self.cache.short_name)
def ordering_name(self): return str(self.cache.ordering_name)
def list_name(self): return str(self.cache.list_name)
def upper_initial(self): return str(self.cache.upper_initial)
def lower_initial(self): return str(self.cache.lower_initial)
def save(self, *args, **kwargs):
if not self.cache:
self.cache = PersonCache.objects.create()
super(Person, self).save(*args, **kwargs)
self.update_cache()
def update_cache(self):
names={}
for rel in self.personnamerelation_set.all():
names[str(rel.name_type.label)]=rel
long_name,short_name,list_name,ordering_name,upper_initial,lower_initial=self.format_collection.apply_formats(names)
self.cache.long_name = long_name
self.cache.short_name = short_name
self.cache.list_name = list_name
self.cache.ordering_name = ordering_name
self.cache.upper_initial = upper_initial
self.cache.lower_initial = lower_initial
self.cache.save()
class PersonNameRelation(models.Model):
person = models.ForeignKey(Person,on_delete=models.PROTECT)
name_type = models.ForeignKey(NameType,on_delete=models.PROTECT)
value = models.CharField(max_length=4096,default="-",db_index=True)
case_rule = models.CharField(max_length=128,choices=[ ("latin","latin"),
("turkic","turkic") ],
default="latin")
def __str__(self): return str(self.value)
def save(self, *args, **kwargs):
super(PersonNameRelation, self).save(*args, **kwargs)
self.person.update_cache()
def _upper(self,x):
if self.case_rule=="latin":
return x.upper()
x=x.replace("ı","I")
x=x.replace("i","İ")
return x.upper()
def _lower(self,x):
if self.case_rule=="latin":
return x.lower()
x=x.replace("I","ı")
x=x.replace("İ","i")
return x.lower()
def _capitalize(self,x):
if self.case_rule=="latin":
return x.capitalize()
return self._upper(x[0])+self._lower(x[1:])
    ### Format syntax
    # {{<name_type>}}: <name_type>
    # {{C|<name_type>}}: <name_type> (capitalized)
    # {{V|<name_type>}}: <name_type> (capitalized, except particles like von, de, etc.)
    # {{L|<name_type>}}: <name_type> (lowercased)
    # {{U|<name_type>}}: <name_type> (uppercased)
    # {{A|<name_type>}}: <name_type> as an integer, in Arabic numerals
    # {{R|<name_type>}}: <name_type> as an integer, in uppercase Roman numerals
    # {{N|<name_type>}}: <name_type> (lowercased, spaces replaced by _)
    # {{I|<name_type>}}: initials (Gian Uberto => G. U.)
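    # e.g. for value "de rossi": norm="de rossi", C="De Rossi", V="de Rossi",
    # U="DE ROSSI", L="de rossi", N="de_rossi", I="D. R."; R and A stay empty
    # because the value is not a number.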
def formatted(self):
val=str(self.value)
val_f={}
t=RE_NAME_SEP.split(val)
#t=map(lambda x: self._capitalize(x),RE_NAME_SEP.split(val))
vons_t=[]
norm_t=[]
for x in t:
if self._lower(x) in VONS:
vons_t.append(self._lower(x))
else:
if len(x)==1 and x.isalpha():
vons_t.append(self._upper(x)+".")
else:
vons_t.append(self._capitalize(x))
if len(x)==1 and x.isalpha():
norm_t.append(x+".")
else:
norm_t.append(x)
cap_t=[self._capitalize(x) for x in norm_t]
val_norm="".join(norm_t)
val_f["L"]=self._lower(val)
val_f["U"]=self._upper(val)
val_f["N"]=self._lower(val).replace(" ","_")
val_f["I"]=". ".join([x[0].upper() for x in list(filter(bool,val.split(" ")))])+"."
val_f["C"]="".join(cap_t)
val_f["V"]="".join(vons_t)
if val.isdigit():
val_f["R"]=ROMANS[int(val)-1]
val_f["A"]="%3.3d" % int(val)
else:
val_f["R"]=""
val_f["A"]=""
val_f["norm"]=val_norm
val_f["norm_upper"]=self._upper(val_norm)
val_f["norm_lower"]=self._lower(val_norm)
return val_f
# long_name=long_name.replace("{{"+key+"}}",val_norm)
# short_name=short_name.replace("{{"+key+"}}",val_norm)
# list_name=list_name.replace("{{"+key+"}}",val_norm)
# ordering_name=ordering_name.replace("{{"+key+"}}",val_norm)
# for k in "VALURNIC":
# long_name=long_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# short_name=short_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# list_name=list_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# ordering_name=ordering_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# return long_name,short_name,list_name,ordering_name
### category
class CategoryTreeNodeManager(models.Manager):
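    # node_id encodes the path from a root as colon-separated segments:
    # category nodes are appended by label (e.g. "science:physics"), plain
    # objects by id (e.g. "science:physics:42"); level is the path depth.
    # (Labels here are illustrative.)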
def roots(self):
return self.filter(level=0)
def until_level(self,level,only_category=True):
if not only_category:
return self.filter(level__lte=level)
return self.filter(level__lte=level,is_category=True)
def branch_nodes(self,base_node,level,only_category=True):
if not only_category:
return self.filter(level=level,node_id__istartswith=base_node.node_id+":")
return self.filter(level=level,node_id__istartswith=base_node.node_id+":",is_category=True)
def update_category(self,cat):
ctype = ContentType.objects.get_for_model(Category)
for cat_node in self.filter(content_type=ctype,object_id=cat.id):
level=int(cat_node.level)
old_node_id=str(cat_node.node_id)
parent_node_id=":".join(old_node_id.split(":")[:-1])
if parent_node_id:
new_node_id=parent_node_id+":"+cat.label
else:
new_node_id=cat.label
cat_node.node_id=new_node_id
cat_node.save()
if not cat_node.has_children: return
cat_children=list(self.filter(node_id__istartswith=old_node_id+":",level=level+1))
for child in cat_children:
self.reparent(new_node_id,level,child)
def remove_category(self,cat):
ctype = ContentType.objects.get_for_model(Category)
node_ids=[]
for cat_node in self.filter(content_type=ctype,object_id=cat.id):
node_ids.append(cat_node.node_id)
self.filter(node_id__istartswith=cat_node.node_id+':').delete()
cat_node.delete()
def create_category(self,cat):
newobj=self.create(content_object=cat,node_id=cat.label,has_children=False,level=0)
newobj.save()
return newobj
def reparent(self,parent_node_id,parent_level,cat_node):
ret=[]
old_node_id=str(cat_node.node_id)
old_level=int(cat_node.level)
rel_node_id=old_node_id.split(":")[-1]
if parent_node_id:
new_node_id=parent_node_id+":"+rel_node_id
else:
new_node_id=rel_node_id
if parent_level>=0:
new_level=parent_level+1
else:
new_level=0
cat_node.node_id=new_node_id
cat_node.level=new_level
cat_node.save()
ret.append(("R",cat_node))
if not cat_node.has_children: return ret
cat_children=list(self.filter(node_id__istartswith=old_node_id+":"))
for cch_node in cat_children:
new_cch_node_id=str(cch_node.node_id).replace(old_node_id+":",new_node_id+":",1)
new_cch_level=int(cch_node.level)-old_level+new_level
cch_node.node_id=new_cch_node_id
cch_node.level=new_cch_level
cch_node.save()
ret.append(("R",cch_node))
return ret
def clone(self,parent_node_id,parent_level,cat_node):
ret=[]
old_node_id=str(cat_node.node_id)
old_level=int(cat_node.level)
rel_node_id=old_node_id.split(":")[-1]
if parent_node_id:
new_node_id=parent_node_id+":"+rel_node_id
else:
new_node_id=rel_node_id
if parent_level>=0:
new_level=parent_level+1
else:
new_level=0
newobj=self.create(content_object=cat_node.content_object,
node_id=new_node_id,
has_children=cat_node.has_children,
level=new_level)
newobj.save()
ret.append(("C",newobj))
if not cat_node.has_children: return ret
cat_children=list(self.filter(node_id__istartswith=old_node_id+":"))
for cch_node in cat_children:
new_cch_node_id=str(cch_node.node_id).replace(old_node_id+":",new_node_id+":",1)
new_cch_level=int(cch_node.level)-old_level+new_level
newobj=self.create(content_object=cch_node.content_object,
node_id=new_cch_node_id,
has_children=cch_node.has_children,
level=new_cch_level)
newobj.save()
ret.append(("C",newobj))
return ret
def add_child_category(self,parent,child):
parent_nodes=list(parent.tree_nodes.all())
child_nodes=list(child.tree_nodes.all())
cn=child_nodes[0]
startind=0
new_objects=[]
if len(child_nodes)==1 and child_nodes[0].level==0:
            ## the only child is a root node
fn=parent_nodes[0]
new_objects=self.reparent(str(fn.node_id),int(fn.level),cn)
startind=1
fn.has_children=True
fn.save()
for fn in parent_nodes[startind:]:
new_objects+=self.clone(str(fn.node_id),int(fn.level),cn)
fn.has_children=True
fn.save()
return new_objects
def remove_child_category(self,parent,child):
parent_nodes=list(parent.tree_nodes.all())
child_nodes=list(child.tree_nodes.all())
del_list=[]
for fn in parent_nodes:
fn_node_id=str(fn.node_id)
for cn in child_nodes:
cn_node_id=str(cn.node_id)
cn_rel_node_id=cn_node_id.split(":")[-1]
if cn_node_id==fn_node_id+":"+cn_rel_node_id:
del_list.append((fn,cn))
break
if len(del_list)==len(child_nodes):
objs=self.clone("",-1,child_nodes[0])
for action,obj in objs:
obj.save()
for parent,node in del_list:
self.remove_branch(node)
parent.has_children=bool(self.filter(node_id__istartswith=str(parent.node_id)+":").exists())
parent.save()
def update_child_category(self,old_parent,old_child,new_parent,new_child):
if not old_parent and not old_child: return
if (old_parent==new_parent) and (old_child==new_child): return
self.remove_child_category(old_parent,old_child)
self.add_child_category(new_parent,new_child)
def remove_branch(self,basenode):
base_node_id=str(basenode.node_id)
self.filter(node_id__istartswith=base_node_id+":").delete()
self.filter(node_id=base_node_id).delete()
def add_category_relation(self,cat,child):
parent_nodes=list(cat.tree_nodes.all())
ret=[]
for fn in parent_nodes:
new_node_id=str(fn.node_id)+":"+str(child.id)
new_level=int(fn.level)+1
newobj=self.create(content_object=child,
node_id=new_node_id,
has_children=False,
level=new_level)
ret.append(("C",newobj))
fn.has_children=True
fn.save()
return ret
def remove_category_relation(self,cat,child):
parent_nodes=list(cat.tree_nodes.all())
node_ids=[]
for fn in parent_nodes:
node_ids.append(str(fn.node_id)+":"+str(child.id))
self.filter(node_id__in=node_ids).delete()
for fn in parent_nodes:
fn.has_children=bool(self.filter(node_id__istartswith=str(fn.node_id)+":").exists())
fn.save()
def update_category_relation(self,old_cat,old_child,new_cat,new_child):
if not old_cat and not old_child: return
if (old_cat==new_cat) and (old_child==new_child): return
self.remove_category_relation(old_cat,old_child)
self.add_category_relation(new_cat,new_child)
def get_num_objects(self,catnode):
if not catnode.is_category: return 1
N=self.filter(node_id__istartswith=catnode.node_id+":",is_category=False).values("content_type","object_id").distinct().count()
return N
def max_level(self,only_cat=True):
if not only_cat:
return self.all().aggregate(Max('level'))["level__max"]
return self.filter(is_category=True).aggregate(Max('level'))["level__max"]
class CategoryTreeNode(models.Model):
content_type = models.ForeignKey(ContentType,on_delete=models.PROTECT)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type','object_id')
node_id = models.CharField(max_length=4096,unique=True)
has_children = models.BooleanField()
level = models.PositiveIntegerField()
objects = CategoryTreeNodeManager()
label = models.CharField(max_length=4096,editable=False)
label_children = models.CharField(max_length=4096,editable=False)
is_category = models.BooleanField(editable=False)
num_objects = models.PositiveIntegerField(editable=False)
def branch_depth(self,only_cat=True):
if only_cat:
ret=CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",is_category=True).aggregate(Max('level'))["level__max"]
else:
ret=CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":").aggregate(Max('level'))["level__max"]
if not ret: return 0
return ret
def branch_level_size(self,level,only_cat=True):
if only_cat:
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",
level=level,is_category=True).count()
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",level=level).count()
def branch(self,only_cat=True):
if only_cat:
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",is_category=True)
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":")
def __str__(self):
U= "%3d %s" % (int(self.level),str(self.node_id))
return U
def direct_size(self):
if not self.is_category: return 0
return self.content_object.child_set.count()
class Meta:
ordering = [ "node_id" ]
def save(self, *args, **kwargs):
self.label_children="_"+str(self.node_id).replace(":","_")
t=str(self.node_id).split(":")
if len(t)==1:
self.label=""
else:
self.label="_"+"_".join(t[:-1])
self.is_category=( self.content_type.model_class() == Category )
self.num_objects = CategoryTreeNode.objects.get_num_objects(self)
super(CategoryTreeNode, self).save(*args, **kwargs)
class CategoryManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
class CategoryQueryset(models.query.QuerySet):
def all_in_branch(self,parent_id):
parent=Category.objects.get(id=int(parent_id))
children_ids=[parent.id]
for catnode in parent.tree_nodes.all():
L=catnode.branch()
children_ids+=[x.object_id for x in list(L)]
children_ids=list(set(children_ids))
return self.filter(id__in=children_ids)
return CategoryQueryset(Category)
def query_set_branch(self,queryset,parent_id):
parent=Category.objects.get(id=int(parent_id))
children_ids=[parent.id]
for catnode in parent.tree_nodes.all():
L=catnode.branch()
children_ids+=[x.object_id for x in list(L)]
children_ids=list(set(children_ids))
return queryset.filter(id__in=children_ids)
def all_in_branch(self,parent_id):
return self.get_query_set().all_in_branch(parent_id)
def merge(self,cat_queryset):
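        # Collapse every category in cat_queryset into one new "[merge] ..."
        # category: re-point parent/child, time-span, place, person and
        # language relations (dropping duplicates), move the books over,
        # then delete the old categories.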
new_name="[merge]"
old_cats=list(cat_queryset.all())
for cat in old_cats:
new_name+=" "+cat.name
new_cat=self.create(name=new_name)
children=[]
for catrel in CategoryRelation.objects.filter(parent__in=old_cats):
if catrel.child in children:
catrel.delete()
continue
catrel.parent=new_cat
children.append(catrel.child)
catrel.save()
parents=[]
for catrel in CategoryRelation.objects.filter(child__in=old_cats):
if new_cat==catrel.parent:
catrel.delete()
continue
if catrel.parent in parents:
catrel.delete()
continue
catrel.child=new_cat
parents.append(catrel.parent)
catrel.save()
L=[]
for catrel in CategoryTimeSpanRelation.objects.filter(category__in=old_cats):
if catrel.time_span in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.time_span)
L=[]
for catrel in CategoryPlaceRelation.objects.filter(category__in=old_cats):
if catrel.place in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.place)
L=[]
for catrel in CategoryPersonRelation.objects.filter(category__in=old_cats):
if catrel.person in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.person)
L=[]
for catrel in CategoryLanguageRelation.objects.filter(category__in=old_cats):
if catrel.language in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.language)
for cat in old_cats:
for book in cat.book_set.all():
book.categories.add(new_cat)
book.categories.remove(cat)
cat.delete()
class Category(models.Model):
name = models.CharField(max_length=4096,unique=True)
label = models.SlugField(max_length=4096,editable=False,unique=True)
tree_nodes = GenericRelation(CategoryTreeNode)
objects = CategoryManager()
def __str__(self): return str(self.name)
class Meta:
ordering = ["name"]
def slugify(self):
S=str(self.name)
S=S.replace("#","sharp")
S=S.replace("++","plusplus")
return django.template.defaultfilters.slugify(S)
def save(self, *args, **kwargs):
self.label = self.slugify()
super(Category, self).save(*args, **kwargs)
def parents(self):
return "; ".join([str(x.parent) for x in self.parent_set.all()])
def children(self):
return "; ".join([str(x.child) for x in self.child_set.all()])
def time_span(self):
return "; ".join([str(x.time_span) for x in self.categorytimespanrelation_set.all()])
def place(self):
return "; ".join([str(x.place) for x in self.categoryplacerelation_set.all()])
def person(self):
return "; ".join([str(x.person) for x in self.categorypersonrelation_set.all()])
def language(self):
return "; ".join([str(x.language) for x in self.categorylanguagerelation_set.all()])
def num_books(self):
return self.book_set.count()
def min_level(self):
level=-1
for node in self.tree_nodes.all():
if level<0:
level=node.level
continue
level=min(level,node.level)
return level
def num_objects(self):
node=self.tree_nodes.all().first()
return node.num_objects
def my_branch_depth(self):
node=self.tree_nodes.all().first()
return node.branch_depth()
def my_branch_id(self):
level=-1
elected=None
for node in self.tree_nodes.all():
if level<0:
elected=node
level=node.level
continue
if level<=node.level: continue
elected=node
level=node.level
node_id=elected.node_id
big_parent_id=node_id.split(":")[0]
#big_parent_node=CategoryTreeNode.objects.get(node_id=big_parent_id)
return big_parent_id
class CategoryRelation(models.Model):
child = models.ForeignKey(Category,related_name="parent_set",on_delete=models.PROTECT)
parent = models.ForeignKey(Category,related_name="child_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.parent)+"/"+str(self.child)
class Meta:
ordering = ["parent","child"]
class CategoryTimeSpanRelation(models.Model):
time_span=models.ForeignKey(TimeSpan,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.time_span)+"/"+str(self.category)
class CategoryPlaceRelation(models.Model):
place=models.ForeignKey(Place,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.place)+"/"+str(self.category)
class CategoryPersonRelation(models.Model):
person=models.ForeignKey(Person,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.person)+"/"+str(self.category)
class CategoryLanguageRelation(models.Model):
language=models.ForeignKey(LanguageVariety,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.language)+"/"+str(self.category)
class CategorizedObject(models.Model):
categories = models.ManyToManyField(Category,blank=True)
class Meta:
abstract = True
def get_categories(self):
return "; ".join([str(x) for x in self.categories.all()])
### authors
def print_result(label):
def g(func):
def f(*args):
res=func(*args)
print(label,res,*args)
return res
return f
return g
class AuthorManager(PersonManager):
def catalog(self):
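        # Build one CatAuthor per author, each holding a heap of
        # (year, role, publication) tuples gathered from the issue, book and
        # article relations in three bulk queries, so rendering the whole
        # catalog needs no per-author queries.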
class PubTuple(tuple):
def __new__ (cls, year,role,obj):
x=super(PubTuple, cls).__new__(cls, tuple( (year,role,obj) ))
return x
def __str__(self):
return "(%s,%s,%s)" % (str(self._year),str(self._role),str(self._obj))
def __init__(self,year,role,obj):
self._year=year
self._role=role
self._obj=obj
#@print_result("EQ")
def __eq__(self,other):
if self._year!=other._year: return False
if type(self._obj) is not type(other._obj): return False
return self._obj.id == other._obj.id
#@print_result("LT")
def __lt__(self,other):
if self._year < other._year: return True
if self._year > other._year: return False
if type(self._obj) is type(other._obj):
return self._obj.id < other._obj.id
if type(self._obj) is Book: return True
if type(other._obj) is Book: return False
return type(self._obj) is Issue
# if isinstance(self._obj,Book):
# if isinstance(other._obj,Book):
# if self._obj.title == other._obj.title:
# return self._obj.id < other._obj.id
# return self._obj.title < other._obj.title
# return True
# if isinstance(other._obj,Book): return False
# if isinstance(self._obj,Issue):
# s_date=self._obj.date
# else:
# s_date=self._obj.issue.date
# if isinstance(other._obj,Issue):
# o_date=other._obj.date
# else:
# o_date=other._obj.issue.date
# if s_date<o_date: return True
# if s_date>o_date: return False
# if type(self._obj) is not type(other._obj):
# return type(self._obj) is Issue
# if self._obj.title == other._obj.title:
# return self._obj.id < other._obj.id
# return self._obj.title < other._obj.title
            def __gt__(self,other): return other.__lt__(self)
            def __le__(self,other): return self.__eq__(other) or self.__lt__(other)
            def __ge__(self,other): return self.__eq__(other) or self.__gt__(other)
            def __ne__(self,other): return not self.__eq__(other)
class CatAuthor(object):
def __init__(self,db_author):
self._db_author=db_author
self.id=db_author.id
self.list_name=db_author.list_name()
self.long_name=db_author.long_name()
self.ordering_name=db_author.ordering_name()
self._publications=[]
def add(self,pub):
heapq.heappush(self._publications, pub)
@property
def publications(self):
return heapq.nsmallest(len(self._publications), self._publications)
issues=[ (rel.author,rel.author_role,rel.issue)
for rel in IssueAuthorRelation.objects.all().select_related() ]
books=[ (rel.author,rel.author_role,rel.book)
for rel in BookAuthorRelation.objects.all().select_related() ]
articles=[ (rel.author,rel.author_role,rel.article)
for rel in ArticleAuthorRelation.objects.all().select_related() ]
authors=[ CatAuthor(aut) for aut in self.all().select_related().prefetch_related("cache") ]
dict_aut={ aut.id: aut for aut in authors }
for aut,role,obj in issues:
dict_aut[aut.id].add( PubTuple(obj.year(),role,obj) )
for aut,role,obj in books:
dict_aut[aut.id].add( PubTuple(obj.year,role,obj) )
for aut,role,obj in articles:
dict_aut[aut.id].add( PubTuple(obj.year(),role,obj) )
return authors
#return self.all().select_related().prefetch_related("cache","authorrelation_set")
class Author(Person):
objects=AuthorManager()
class Meta:
proxy = True
def publications(self):
L=[]
for rel in self.authorrelation_set.all().select_related():
L.append( (rel.year,rel.author_role,rel.actual()) )
return L
def get_absolute_url(self):
return "/bibliography/author/%d" % self.pk
def save(self,*args,**kwargs):
Person.save(self,*args,**kwargs)
class AuthorRole(LabeledAbstract):
cover_name = models.BooleanField(default=False)
action = models.CharField(default="",max_length=1024,blank=True)
pos = models.IntegerField(unique=True)
class AuthorRelation(models.Model):
author = models.ForeignKey(Author,on_delete=models.PROTECT)
author_role = models.ForeignKey(AuthorRole,on_delete=models.PROTECT)
content_type = models.ForeignKey(ContentType,editable=False,null=True,on_delete=models.PROTECT)
year = models.IntegerField(editable=False,db_index=True)
#year_label = models.CharField(max_length=10,editable=False)
#title = models.CharField(max_length=4096)
class Meta:
ordering = [ "year" ]
def _year(self): return 0
def _title(self): return ""
def html(self): return ""
def update_year(self):
try:
self.year=self.actual()._year()
except:
self.year=self._year()
self.save()
def actual(self):
model = self.content_type.model
return self.__getattribute__(model)
def save(self,*args, **kwargs):
if (not self.content_type):
self.content_type = ContentType.objects.get_for_model(self.__class__)
try:
self.year=self.actual()._year()
except:
self.year=self._year()
super(AuthorRelation, self).save(*args, **kwargs)
def clean(self,*args,**kwargs):
self.year=self._year()
super(AuthorRelation, self).clean(*args, **kwargs)
class MigrAuthor(models.Model):
cod = models.CharField(max_length=1,default="-",db_index=True)
ind = models.IntegerField(db_index=True)
author = models.ForeignKey(Author,on_delete=models.PROTECT)
def __str__(self): return str(self.cod)+str(self.ind)+" "+str(self.author)
### publishers
class PublisherState(models.Model):
name = models.CharField(max_length=4096)
class Meta:
ordering = ["name"]
def __str__(self): return str(self.name)
class PublisherAddress(models.Model):
city = models.CharField(max_length=4096)
state = models.ForeignKey(PublisherState,on_delete=models.PROTECT)
def __str__(self): return str(self.city)+" - "+str(self.state)
class Meta:
ordering = ["city"]
class PublisherIsbnManager(models.Manager):
def isbn_alpha(self):
return self.all().filter(isbn__iregex=r'^[a-z].*')
def split_isbn(self,unseparated):
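        # Try known publisher prefixes (lengths 1..8) against each raw ISBN;
        # e.g. (illustrative) with prefix "88420" stored as a PublisherIsbn,
        # split_isbn(["884203120"]) returns ([("88420", "3120")], []).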
if not unseparated: return [],[]
isbn_list=[]
for isbn in unseparated:
for n in range(1,9):
isbn_list.append(isbn[:n])
L=[ v.isbn for v in self.filter(isbn__in=isbn_list) ]
if not L:
return [],unseparated
uns=[]
sep=[]
for isbn in unseparated:
trovato=False
for db_isbn in L:
if isbn.startswith(db_isbn):
trovato=True
isbn_book=isbn[len(db_isbn):]
sep.append( (db_isbn,isbn_book) )
break
if not trovato:
uns.append(isbn)
return sep,uns
class PublisherIsbn(models.Model):
isbn = models.CharField(max_length=4096,unique=True,db_index=True)
preferred = models.ForeignKey("Publisher",editable=False,blank=True,on_delete=models.PROTECT)
objects = PublisherIsbnManager()
class Meta:
ordering = ["isbn"]
def update_preferred(self):
self.preferred=self.get_preferred()
self.save()
def get_preferred(self):
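        # Prefer the single non-alias publisher among those sharing this
        # prefix; otherwise fall back to the first one, or to the
        # placeholder publisher with pk=0 when none exist yet.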
if self._state.adding:
return Publisher.objects.get(pk=0)
pubs=list(self.publisher_set.all())
if len(pubs)==0: return Publisher.objects.get(pk=0)
if len(pubs)!=1:
for p in pubs:
if not p.alias:
return p
return pubs[0]
def clean(self,*args,**kwargs):
self.preferred=self.get_preferred()
super(PublisherIsbn, self).clean(*args, **kwargs)
def save(self,*args,**kwargs):
self.preferred=self.get_preferred()
super(PublisherIsbn, self).save(*args, **kwargs)
def __str__(self): return str(self.isbn)
def publishers(self):
return "; ".join(map(str, self.publisher_set.all()))
class PublisherManager(models.Manager):
def add_prefetch(self,obj_list):
qset=self.filter(id__in=[obj.id for obj in obj_list])
qset=qset.prefetch_related("addresses")
return qset
def look_for(self,isbn_list):
qset=PublisherIsbn.objects.filter(isbn__in=isbn_list)
for pub in qset:
isbn_list.remove( pub.isbn )
isbn_ids=[ obj.id for obj in qset ]
p_qset=self.filter(isbns__id__in=isbn_ids).prefetch_related("isbns","addresses")
return p_qset,isbn_list
class Publisher(models.Model):
name = models.CharField(max_length=4096)
full_name = models.CharField(max_length=4096,blank=True)
url = models.CharField(max_length=4096,default="--")
note = models.TextField(blank=True,default="")
addresses = models.ManyToManyField(PublisherAddress,through='PublisherAddressPublisherRelation',blank=True)
alias = models.BooleanField(default=False)
isbns = models.ManyToManyField(PublisherIsbn,blank=True)
objects=PublisherManager()
class Meta:
ordering = ["name"]
def short_name(self):
name=self.show_name().lower()
tname=name.replace(".","").replace(",","").split()
for s in [ "srl", "spa","editore","editrice","edizioni","verlag","publisher","inc",
"éditions","editions","edition","editorial","editori","editoriale","ltd",
"gruppo","publishing","yayın","yayınları","co","publications","press","editoriali"]:
if s in tname:
tname.remove(s)
tname=[ s.capitalize() for s in tname ]
return " ".join(tname)
def clean(self,*args,**kwargs):
if not self.full_name:
self.full_name=self.name
super(Publisher, self).clean(*args, **kwargs)
def __str__(self): return str(self.name)
def address(self):
return " - ".join([str(x.address.city) for x in self.publisheraddresspublisherrelation_set.order_by("pos")])
def show_name(self):
if self.full_name: return self.full_name
return self.name
def html(self):
H=self.name
adrs=self.address()
if adrs:
H+=", "+adrs
return H
@cached_property
def isbn_prefix(self):
return ", ".join([str(x.isbn) for x in self.isbns.all()])
@cached_property
def isbn_list(self):
return [str(x.isbn) for x in self.isbns.all()]
class PublisherAddressPublisherRelation(PositionAbstract):
address = models.ForeignKey(PublisherAddress,on_delete=models.PROTECT)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.publisher)+" ["+str(self.pos)+"] "+str(self.address)
class MigrPublisherRiviste(models.Model):
registro = models.CharField(max_length=4096)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.registro)
### publications
class VolumeType(LabeledAbstract):
read_as = models.CharField(max_length=1024,default="")
class PublicationManager(models.Manager):
def issn_alpha(self):
return self.all().filter(issn_crc='Y')
class Publication(models.Model):
issn = models.CharField(max_length=128) #7
issn_crc = models.CharField(max_length=1,editable=False,default="Y")
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
title = models.CharField(max_length=4096)
volume_type = models.ForeignKey(VolumeType,on_delete=models.PROTECT)
date_format = models.CharField(max_length=4096,default="%Y-%m-%d")
objects=PublicationManager()
#periodicity=models.CharField(max_length=128,choices=[ ("monthly","monthly"),("unknown","unknown") ],default="unknown")
#first_day=models.IntegerField(default=1)
class Meta:
ordering = ['title']
def html(self):
tit=str(self.title)
if not tit: return ""
return "<i>"+tit+"</i>"
def __str__(self): return str(self.title)
def get_absolute_url(self):
return "/bibliography/publication/%d" % self.pk
def update_crc(self):
self.issn_crc = self.crc()
self.save()
def crc(self):
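        # Standard ISSN check digit: weights 8..2 over the first 7 digits,
        # check = 11 - (weighted sum mod 11), with 11 -> 0 and 10 -> 'X';
        # e.g. issn "0378595": weighted sum 160, 160 % 11 == 6, check digit 5
        # (full ISSN 0378-5955).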
if not str(self.issn).isdigit(): return('Y')
pesi=[8,7,6,5,4,3,2]
cod_lista=list(map(int,list(self.issn)))
if len(cod_lista)<7:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,7)]
crc=11-(sum(map(lambda x,y: x*y,cod_lista,pesi))%11)
if (crc==10): return('X')
if (crc==11): return(0)
return(crc)
def clean(self,*args,**kwargs):
self.issn_crc = self.crc()
super(Publication, self).clean(*args, **kwargs)
def issue_set(self):
return Issue.objects.filter(volume__publication__id=self.id).order_by("date")
class Volume(models.Model):
label = models.CharField(max_length=256,db_index=True)
publication = models.ForeignKey(Publication,on_delete=models.PROTECT)
def __str__(self): return str(self.publication)+" - "+str(self.label)
def html(self):
H=self.publication.html()
if H:
H+=", "
H+=str(self.publication.volume_type.read_as)
if H:
H+=" "
H+=str(self.label)
return H
### publication issues
class IssueType(LabeledAbstract): pass
class IssueManager(models.Manager):
def by_publication(self,publication):
return self.all().filter(volume__publication__id=publication.id).order_by("date")
class Issue(models.Model):
volume = models.ForeignKey(Volume,on_delete=models.PROTECT)
issue_type = models.ForeignKey(IssueType,on_delete=models.PROTECT)
issn_num = models.CharField(max_length=8)
number = models.CharField(max_length=256)
title = models.CharField(max_length=4096,blank=True,default="")
date = models.DateField()
date_ipotetic = models.BooleanField(default=False)
html_cache = models.TextField(blank=True,null=True,default="",editable=False)
authors = models.ManyToManyField(Author,through='IssueAuthorRelation',blank=True)
objects=IssueManager()
class Meta:
ordering = ['date']
def issn(self):
return self.volume.publication.issn
def show_date(self):
D=self.date.strftime(self.volume.publication.date_format)
if self.date_ipotetic:
return D+"?"
return D
def save(self,*args,**kwargs):
self.html_cache=self._html()
return models.Model.save(self,*args,**kwargs)
def html(self): return self.html_cache
def _html(self):
H=self.volume.html()
if H:
H+=", "
H+="n. "+str(self.number)
tit=str(self.title)
if tit:
H+=", <i>"+tit+"</i>"
H+=", "
H+=self.date.strftime("%B %Y")
if self.date_ipotetic:
H+="?"
return H
def __str__(self):
U=str(self.volume)
U+="/"+str(self.number)
if str(self.title):
U+=". "+str(self.title)
return U
def year(self):
return self.date.year
class IssueAuthorRelation(AuthorRelation,PositionAbstract):
issue = models.ForeignKey(Issue,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.issue)
def _year(self): return int(self.issue.year())
def _title(self): return str(self.issue.title)
def html(self):
print("COM")
print(self.issue.html())
return self.issue.html()
class Meta:
ordering=["pos"]
#unique_together= [ 'author','author_role','issue' ]
def save(self,*args,**kwargs):
if not self.pos:
self.pos=1
return super(IssueAuthorRelation,self).save(*args,**kwargs)
class Article(models.Model):
title = models.CharField(max_length=4096)
issue = models.ForeignKey(Issue,on_delete=models.PROTECT)
page_begin = models.CharField(max_length=10,blank=True,default="x")
page_end = models.CharField(max_length=10,blank=True,default="x")
authors = models.ManyToManyField(Author,through='ArticleAuthorRelation',blank=True)
html_cache = models.TextField(blank=True,null=True,default="",editable=False)
def get_authors(self):
return ", ".join([str(x.author.long_name()) for x in self.articleauthorrelation_set.filter(author_role__cover_name=True).order_by("pos")])
def get_secondary_authors(self):
L=list(self.articleauthorrelation_set.filter(author_role__cover_name=False).order_by("author_role__pos","pos"))
ret=""
curr_pos=-1
comma=True
for rel in L:
if curr_pos!=int(rel.author_role.pos):
action=str(rel.author_role.action).strip()
if action:
if ret:
ret+=", "
ret+=action+" "
comma=False
curr_pos=int(rel.author_role.pos)
if ret and comma: ret+=", "
ret+=rel.author.long_name()
comma=True
return ret
def __str__(self): return str(self.title) #+" ("+unicode(self.year)+")"
def issn(self): return self.issue.issn()
def issn_num(self): return self.issue.issn_num
def year(self): return self.issue.year()
def save(self,*args,**kwargs):
self.html_cache=self._html()
return models.Model.save(self,*args,**kwargs)
def html(self): return self.html_cache
def _html(self):
H=""
H+=self.get_authors()
if H:
H+=", "
H+="“"+str(self.title)+"”, "
sec_authors=self.get_secondary_authors()
if sec_authors:
H+=sec_authors+", "
issue=self.issue.html()
if issue:
H+=issue+", "
if str(self.page_begin)==str(self.page_end):
H+="p. "+str(self.page_begin)
else:
H+="pp. "+str(self.page_begin)+"-"+str(self.page_end)
return H
class ArticleAuthorRelation(AuthorRelation,PositionAbstract):
article = models.ForeignKey(Article,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.article)
def _year(self): return int(self.article.year())
def _title(self): return str(self.article.title)
def html(self):
print("ART")
print(self.article.html())
return self.article.html()
class Meta:
ordering=["pos"]
### books
class BookManager(models.Manager):
def isbn_alpha(self):
return self.all().filter(isbn_crc10='Y').order_by("isbn_ced","isbn_book","year","title")
def by_isbn_pub(self,isbn):
print("ISBN:",isbn)
return self.all().filter(isbn_ced__iexact=isbn).order_by("isbn_ced","isbn_book","year","title")
def add_prefetch(self,obj_list):
qset=self.filter(id__in=[book.id for book in obj_list])
qset=qset.select_related("publisher").prefetch_related("authors")
return qset
def look_for(self,isbn_list):
if not isbn_list: return None,[]
q=models.Q()
for isbn_ced,isbn_book in isbn_list:
q=q|models.Q(isbn_ced=isbn_ced,isbn_book=isbn_book)
qset=self.filter(q).select_related("publisher").prefetch_related("authors")
new_isbn_list=[]
for book in qset:
isbn_list.remove( (book.isbn_ced,book.isbn_book) )
return qset,isbn_list
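# Example (hypothetical values): Book.objects.look_for([("0306","40615"), ("8804","12345")])
# returns the queryset of books already present plus the (isbn_ced, isbn_book)
# pairs that were not found and still need to be fetched.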
class Book(CategorizedObject):
isbn_ced = models.CharField(max_length=9,db_index=True)
isbn_book = models.CharField(max_length=9,db_index=True)
isbn_crc10 = models.CharField(max_length=1,editable=False,default="Y")
isbn_crc13 = models.CharField(max_length=1,editable=False,default="Y")
isbn_cache10 = models.CharField(max_length=20,editable=False,default="")
isbn_cache13 = models.CharField(max_length=20,editable=False,default="")
title = models.CharField(max_length=4096)
year = models.IntegerField()
year_ipotetic = models.BooleanField(default=False)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
authors = models.ManyToManyField(Author,through='BookAuthorRelation',blank=True)
html_cache = models.TextField(blank=True,default="",editable=False)
objects=BookManager()
class Meta:
ordering=["title","year","publisher"]
index_together=[ ["isbn_ced","isbn_book"] ]
def get_authors(self):
return ", ".join([str(x.author.long_name()) for x in self.bookauthorrelation_set.filter(author_role__cover_name=True).order_by("pos")])
def get_absolute_url(self):
U="/bibliography/book/%d" % self.pk
return U
def get_secondary_authors(self):
L=list(self.bookauthorrelation_set.filter(author_role__cover_name=False).order_by("author_role__pos","pos"))
ret=""
curr_pos=-1
comma=True
for rel in L:
if curr_pos!=int(rel.author_role.pos):
action=str(rel.author_role.action).strip()
if action:
if ret:
ret+=", "
ret+=action+" "
comma=False
curr_pos=int(rel.author_role.pos)
if ret and comma: ret+=", "
ret+=rel.author.long_name()
comma=True
return ret
def __str__(self):
if not self.year_ipotetic:
return str(self.title)+" ("+str(self.year)+")"
return str(self.title)+" ("+str(self.year)+"?)"
    def html(self): return self.html_cache
def _html(self):
H=""
H+=self.get_authors()
if H:
H+=", "
H+="<i>"+str(self.title)+"</i>, "
sec_authors=self.get_secondary_authors()
if sec_authors:
H+=sec_authors+", "
pub=self.publisher.html()
if pub:
H+=pub+", "
H+=str(self.year)
if self.year_ipotetic: H+="?"
return H
    def _refresh_isbn_cache(self):
        self.isbn_crc10 = self.crc10()
        self.isbn_crc13 = self.crc13()
        self.isbn_cache10 = self.isbn_ced+self.isbn_book+str(self.isbn_crc10)
        self.isbn_cache13 = '978'+self.isbn_ced+self.isbn_book+str(self.isbn_crc13)
    def clean(self,*args,**kwargs):
        self._refresh_isbn_cache()
        super(Book, self).clean(*args, **kwargs)
    def save(self,*args,**kwargs):
        self._refresh_isbn_cache()
        self.html_cache=self._html()
        super(Book, self).save(*args, **kwargs)
    def update_crc(self):
        self._refresh_isbn_cache()
        self.save()
def isbn10(self):
return str(self.isbn_ced)+"-"+str(self.isbn_book)+"-"+str(self.isbn_crc10)
def isbn13(self):
return "978-"+str(self.isbn_ced)+"-"+str(self.isbn_book)+"-"+str(self.isbn_crc13)
def crc10(self):
if not str(self.isbn_book).isdigit(): return('Y')
if not str(self.isbn_ced).isdigit(): return('Y')
isbn=str(self.isbn_ced)+str(self.isbn_book)
pesi=[10,9,8,7,6,5,4,3,2]
cod_lista=list(map(int,list(isbn)))
if len(cod_lista)<9:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,9)]
crc=11-(sum(map(lambda x,y: x*y,cod_lista,pesi))%11)
if (crc==10): return('X')
if (crc==11): return(0)
return(crc)
def crc13(self):
if not str(self.isbn_book).isdigit(): return('Y')
if not str(self.isbn_ced).isdigit(): return('Y')
isbn=str(self.isbn_ced)+str(self.isbn_book)
pesi=[1,3,1,3,1,3,1,3,1,3,1,3]
cod_lista=[9,7,8]+list(map(int,list(isbn)))
if len(cod_lista)<12:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,12)]
crc=10-(sum(map(lambda x,y: x*y,cod_lista,pesi))%10)
if (crc==10): return(0)
return(crc)
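    # Worked example for the two check-digit routines above (illustrative
    # values, not from this database): with isbn_ced="0306" and
    # isbn_book="40615" the digit string is "030640615", so
    #   crc10: weighted sum 10*0+9*3+...+2*5 = 130, 11-(130%11) = 2  -> ISBN-10 0-306-40615-2
    #   crc13: prefix [9,7,8] with weights 1,3,1,3,... gives 93, 10-(93%10) = 7 -> ISBN-13 978-0-306-40615-7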
class BookAuthorRelation(AuthorRelation,PositionAbstract):
book = models.ForeignKey(Book,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.book)
def _year(self): return int(self.book.year)
def _title(self): return str(self.book.title)
def html(self): return self.book.html()
def get_absolute_url(self): return self.book.get_absolute_url()
class Meta:
ordering=["pos"]
class TextsCdrom(LabeledAbstract):
books = models.ManyToManyField(Book,blank=True)
# class BookTimeSpanRelation(models.Model):
# time_span=models.ForeignKey(TimeSpan)
# book=models.OneToOneField(Book)
# def __str__(self):
# return unicode(self.time_span)+u"/"+unicode(self.book)
### repository cache
class RepositoryCacheBook(models.Model):
isbn = models.CharField(max_length=13,unique=True)
publisher = models.CharField(max_length=4096,default=" ")
year = models.CharField(max_length=4096,default=" ",blank=True)
title = models.CharField(max_length=4096,default=" ")
city = models.CharField(max_length=4096,default=" ")
indb = models.BooleanField(default=False)
def clean(self,*args,**kwargs):
if not self.year:
self.year=" "
super(RepositoryCacheBook, self).clean(*args, **kwargs)
def __str__(self):
return str(self.isbn)+" "+str(self.title)
class Meta:
ordering = [ "isbn" ]
class RepositoryCacheAuthor(PositionAbstract):
book = models.ForeignKey(RepositoryCacheBook,on_delete=models.PROTECT)
name = models.CharField(max_length=4096)
role = models.CharField(max_length=4096)
def __str__(self):
return self.name
class Meta:
ordering = [ "name" ]
class RepositoryFailedIsbn(models.Model):
isbn10 = models.CharField(max_length=4096)
isbn13 = models.CharField(max_length=4096)
def __str__(self):
return self.isbn10+"/"+self.isbn13
class Meta:
ordering = [ "isbn10" ]
### others
class BookSerieWithoutIsbn(models.Model):
isbn_ced = models.CharField(max_length=9,db_index=True)
isbn_book_prefix = models.CharField(max_length=9,db_index=True)
title = models.CharField(max_length=4096)
title_prefix = models.CharField(max_length=4096,default='',blank=True)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.title)
### signals
def category_post_save_handler(sender,instance,created,raw,using,update_fields,**kwargs):
if raw: return
if created:
CategoryTreeNode.objects.create_category(instance)
else:
CategoryTreeNode.objects.update_category(instance)
post_save.connect(category_post_save_handler,sender=Category)
def category_pre_delete_handler(sender,instance,using,**kwargs):
CategoryTreeNode.objects.remove_category(instance)
pre_delete.connect(category_pre_delete_handler,sender=Category)
class CategoryRelationChangeHandler(object):
def __init__(self):
self.old_parents={}
self.old_children={}
def pre_save(self,sender,instance,raw,using,update_fields,**kwargs):
if raw: return
if not instance.id: return
old_obj=CategoryRelation.objects.get(id=instance.id)
self.old_parents[instance.id]=old_obj.parent
self.old_children[instance.id]=old_obj.child
def post_save(self,sender,instance,created,raw,using,update_fields,**kwargs):
if raw: return
if created:
CategoryTreeNode.objects.add_child_category(instance.parent,instance.child)
return
old_parent=None
old_child=None
if instance.id in self.old_parents:
old_parent=self.old_parents[instance.id]
del(self.old_parents[instance.id])
if instance.id in self.old_children:
old_child=self.old_children[instance.id]
del(self.old_children[instance.id])
CategoryTreeNode.objects.update_child_category(old_parent,old_child,instance.parent,instance.child)
categoryrelation_save_handler=CategoryRelationChangeHandler()
post_save.connect(categoryrelation_save_handler.post_save,sender=CategoryRelation)
pre_save.connect(categoryrelation_save_handler.pre_save,sender=CategoryRelation)
def categoryrelation_pre_delete_handler(sender,instance,using,**kwargs):
CategoryTreeNode.objects.remove_child_category(instance.parent,instance.child)
pre_delete.connect(categoryrelation_pre_delete_handler,sender=CategoryRelation)
def categorizedobjectcategoryrelation_m2m_changed_handler(sender, instance, action, reverse,model,pk_set,using,**kwargs):
if action=="post_add":
function=CategoryTreeNode.objects.add_category_relation
elif action=="pre_remove":
function=CategoryTreeNode.objects.remove_category_relation
else:<|fim▁hole|>
if model==Category:
cat_list=Category.objects.filter(pk__in=list(pk_set))
for cat in cat_list:
function(cat,instance)
return
target_list=model.objects.filter(pk__in=list(pk_set))
for target in target_list:
function(instance,target)
m2m_changed.connect(categorizedobjectcategoryrelation_m2m_changed_handler,sender=Book.categories.through)
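# Note: only Book.categories is connected here; other CategorizedObject
# subclasses (e.g. the Article model hinted at in the commented-out receiver
# below) would need an equivalent m2m_changed.connect call for their category
# changes to keep CategoryTreeNode in sync.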
# @receiver(django.db.models.signals.m2m_changed, sender=Article.categories.through)
# def modify_articlecategoryrelation_handler(sender, **kwargs):
# print "Modify",kwargs["instance"],"with action:",kwargs["action"],kwargs["model"],kwargs["pk_set"]<|fim▁end|> | return |
<|file_name|>websockets_chat_async.py<|end_file_name|><|fim▁begin|>#!./uwsgi --http-socket :9090 --async 100 ...
# same chat example but using uwsgi async api
# for pypy + continulets just run:
# uwsgi --http-socket :9090 --pypy-home /opt/pypy --pypy-wsgi-file tests/websockets_chat_async.py --pypy-eval "uwsgi_pypy_setup_continulets()" --async 100
import uwsgi
import time
import redis
import sys
def application(env, sr):
ws_scheme = 'ws'
if 'HTTPS' in env or env['wsgi.url_scheme'] == 'https':
ws_scheme = 'wss'
if env['PATH_INFO'] == '/':
sr('200 OK', [('Content-Type', 'text/html')])
output = """
<html>
<head>
<script language="Javascript">
var s = new WebSocket("%s://%s/foobar/");
s.onopen = function() {
alert("connected !!!");
s.send("ciao");
};
s.onmessage = function(e) {
var bb = document.getElementById('blackboard')
var html = bb.innerHTML;
bb.innerHTML = html + '<br/>' + e.data;
};
s.onerror = function(e) {
alert(e);
}
s.onclose = function(e) {
alert("connection closed");
}
function invia() {
var value = document.getElementById('testo').value;
s.send(value);
}
</script>
</head>
<body>
<h1>WebSocket</h1>
<input type="text" id="testo"/>
<input type="button" value="invia" onClick="invia();"/>
<div id="blackboard" style="width:640px;height:480px;background-color:black;color:white;border: solid 2px red;overflow:auto">
</div>
</body>
</html>
""" % (ws_scheme, env['HTTP_HOST'])
if sys.version_info[0] > 2:<|fim▁hole|> elif env['PATH_INFO'] == '/favicon.ico':
return ""
elif env['PATH_INFO'] == '/foobar/':
uwsgi.websocket_handshake(env['HTTP_SEC_WEBSOCKET_KEY'], env.get('HTTP_ORIGIN', ''))
print("websockets...")
r = redis.StrictRedis(host='localhost', port=6379, db=0)
channel = r.pubsub()
channel.subscribe('foobar')
websocket_fd = uwsgi.connection_fd()
redis_fd = channel.connection._sock.fileno()
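        # Event-loop sketch: arm read-watches on both file descriptors, suspend
        # this core, and on wakeup uwsgi.ready_fd() reports which fd fired; a
        # return of -1 means the 3-second timeout elapsed, which the else
        # branch below uses to service websocket ping/pong.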
while True:
uwsgi.wait_fd_read(websocket_fd, 3)
uwsgi.wait_fd_read(redis_fd)
uwsgi.suspend()
fd = uwsgi.ready_fd()
if fd > -1:
if fd == websocket_fd:
msg = uwsgi.websocket_recv_nb()
if msg:
r.publish('foobar', msg)
elif fd == redis_fd:
msg = channel.parse_response()
print(msg)
# only interested in user messages
t = 'message'
if sys.version_info[0] > 2:
t = b'message'
if msg[0] == t:
uwsgi.websocket_send("[%s] %s" % (time.time(), msg))
else:
# on timeout call websocket_recv_nb again to manage ping/pong
msg = uwsgi.websocket_recv_nb()
if msg:
r.publish('foobar', msg)<|fim▁end|> | return output.encode('latin1')
return output |
<|file_name|>os_solaris.inline.hpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef OS_SOLARIS_VM_OS_SOLARIS_INLINE_HPP
#define OS_SOLARIS_VM_OS_SOLARIS_INLINE_HPP
#include "runtime/atomic.inline.hpp"
#include "runtime/os.hpp"
#ifdef TARGET_OS_ARCH_solaris_x86
# include "orderAccess_solaris_x86.inline.hpp"
#endif
#ifdef TARGET_OS_ARCH_solaris_sparc
# include "orderAccess_solaris_sparc.inline.hpp"
#endif
// System includes
#include <sys/param.h>
#include <dlfcn.h>
#include <sys/socket.h>
#include <sys/poll.h>
#include <sys/filio.h>
#include <unistd.h>
#include <netdb.h>
#include <setjmp.h>
inline const char* os::file_separator() { return "/"; }
inline const char* os::line_separator() { return "\n"; }
inline const char* os::path_separator() { return ":"; }
// File names are case-insensitive on windows only
inline int os::file_name_strcmp(const char* s1, const char* s2) {
return strcmp(s1, s2);
}
inline bool os::uses_stack_guard_pages() {
return true;
}
inline bool os::allocate_stack_guard_pages() {
assert(uses_stack_guard_pages(), "sanity check");
int r = thr_main() ;
guarantee (r == 0 || r == 1, "CR6501650 or CR6493689") ;
return r;
}
// On Solaris, reservations are made on a page by page basis, nothing to do.
inline void os::pd_split_reserved_memory(char *base, size_t size,
size_t split, bool realloc) {
}
// Bang the shadow pages if they need to be touched to be mapped.
inline void os::bang_stack_shadow_pages() {
}
inline void os::dll_unload(void *lib) { ::dlclose(lib); }
inline DIR* os::opendir(const char* dirname) {
assert(dirname != NULL, "just checking");
return ::opendir(dirname);
}
inline int os::readdir_buf_size(const char *path) {
int size = pathconf(path, _PC_NAME_MAX);
return (size < 0 ? MAXPATHLEN : size) + sizeof(dirent) + 1;
}
inline struct dirent* os::readdir(DIR* dirp, dirent* dbuf) {
assert(dirp != NULL, "just checking");
#if defined(_LP64) || defined(_GNU_SOURCE) || _FILE_OFFSET_BITS==64
dirent* p;
int status;
if((status = ::readdir_r(dirp, dbuf, &p)) != 0) {
errno = status;
return NULL;
} else
return p;
#else // defined(_LP64) || defined(_GNU_SOURCE) || _FILE_OFFSET_BITS==64
return ::readdir_r(dirp, dbuf);
#endif // defined(_LP64) || defined(_GNU_SOURCE) || _FILE_OFFSET_BITS==64
}
inline int os::closedir(DIR *dirp) {
assert(dirp != NULL, "argument is NULL");
return ::closedir(dirp);
}
//////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// macros for interruptible io and system calls and system call restarting
#define _INTERRUPTIBLE(_setup, _cmd, _result, _thread, _clear, _before, _after, _int_enable) \
do { \
_setup; \
_before; \
OSThread* _osthread = _thread->osthread(); \
if (_int_enable && _thread->has_last_Java_frame()) { \
/* this is java interruptible io stuff */ \
if (os::is_interrupted(_thread, _clear)) { \
os::Solaris::bump_interrupted_before_count(); \
_result = OS_INTRPT; \
} else { \
/* _cmd always expands to an assignment to _result */ \
if ((_cmd) < 0 && errno == EINTR \
&& os::is_interrupted(_thread, _clear)) { \
os::Solaris::bump_interrupted_during_count(); \
_result = OS_INTRPT; \
} \
} \
} else { \
/* this is normal blocking io stuff */ \
_cmd; \
} \
_after; \
} while(false)
// Interruptible io support + restarting of interrupted system calls
#ifndef ASSERT
#define INTERRUPTIBLE(_cmd, _result, _clear) do { \
_INTERRUPTIBLE( JavaThread* _thread = (JavaThread*)ThreadLocalStorage::thread(),_result = _cmd, _result, _thread, _clear, , , UseVMInterruptibleIO); \
} while((_result == OS_ERR) && (errno == EINTR))
#else
// This adds an assertion that it is only called from thread_in_native
// The call overhead is skipped for performance in product mode
#define INTERRUPTIBLE(_cmd, _result, _clear) do { \
_INTERRUPTIBLE(JavaThread* _thread = os::Solaris::setup_interruptible_native(), _result = _cmd, _result, _thread, _clear, , os::Solaris::cleanup_interruptible_native(_thread), UseVMInterruptibleIO ); \
} while((_result == OS_ERR) && (errno == EINTR))
#endif
// Used for calls from _thread_in_vm, not from _thread_in_native
#define INTERRUPTIBLE_VM(_cmd, _result, _clear) do { \
_INTERRUPTIBLE(JavaThread* _thread = os::Solaris::setup_interruptible(), _result = _cmd, _result, _thread, _clear, , os::Solaris::cleanup_interruptible(_thread), UseVMInterruptibleIO ); \
} while((_result == OS_ERR) && (errno == EINTR))
/* Use NORESTART when the system call cannot return EINTR, when something other
than a system call is being invoked, or when the caller must do EINTR
handling. */
#ifndef ASSERT
#define INTERRUPTIBLE_NORESTART(_cmd, _result, _clear) \
_INTERRUPTIBLE( JavaThread* _thread = (JavaThread*)ThreadLocalStorage::thread(),_result = _cmd, _result, _thread, _clear, , , UseVMInterruptibleIO)
#else
// This adds an assertion that it is only called from thread_in_native
// The call overhead is skipped for performance in product mode
#define INTERRUPTIBLE_NORESTART(_cmd, _result, _clear) \
_INTERRUPTIBLE(JavaThread* _thread = os::Solaris::setup_interruptible_native(), _result = _cmd, _result, _thread, _clear, , os::Solaris::cleanup_interruptible_native(_thread), UseVMInterruptibleIO )
#endif
// Don't attend to UseVMInterruptibleIO. Always allow interruption.
// Also assumes that it is called from the _thread_blocked state.
// Used by os_sleep().
#define INTERRUPTIBLE_NORESTART_VM_ALWAYS(_cmd, _result, _thread, _clear) \
_INTERRUPTIBLE(os::Solaris::setup_interruptible_already_blocked(_thread), _result = _cmd, _result, _thread, _clear, , , true )
#define INTERRUPTIBLE_RETURN_INT(_cmd, _clear) do { \
int _result; \
do { \
INTERRUPTIBLE(_cmd, _result, _clear); \
} while((_result == OS_ERR) && (errno == EINTR)); \
return _result; \
} while(false)
#define INTERRUPTIBLE_RETURN_INT_VM(_cmd, _clear) do { \
int _result; \
do { \
INTERRUPTIBLE_VM(_cmd, _result, _clear); \
} while((_result == OS_ERR) && (errno == EINTR)); \
return _result; \
} while(false)
#define INTERRUPTIBLE_RETURN_INT_NORESTART(_cmd, _clear) do { \
int _result; \
INTERRUPTIBLE_NORESTART(_cmd, _result, _clear); \
return _result; \
} while(false)
/* Use the RESTARTABLE macros when interruptible io is not needed */
#define RESTARTABLE(_cmd, _result) do { \
do { \
_result = _cmd; \
} while((_result == OS_ERR) && (errno == EINTR)); \
} while(false)
#define RESTARTABLE_RETURN_INT(_cmd) do { \
int _result; \
RESTARTABLE(_cmd, _result); \
return _result; \
} while(false)
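// Illustrative use of RESTARTABLE_RETURN_INT (hypothetical wrapper, not part
// of this header): retry a syscall transparently on EINTR, e.g.
//   inline int os::open(const char *path, int oflag, int mode) {
//     RESTARTABLE_RETURN_INT(::open(path, oflag, mode));
//   }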
inline bool os::numa_has_static_binding() { return false; }
inline bool os::numa_has_group_homing() { return true; }
inline int os::socket(int domain, int type, int protocol) {
return ::socket(domain, type, protocol);
}
inline int os::listen(int fd, int count) {
if (fd < 0) return OS_ERR;
return ::listen(fd, count);<|fim▁hole|>
inline int os::socket_shutdown(int fd, int howto){
return ::shutdown(fd, howto);
}
inline int os::get_sock_name(int fd, struct sockaddr* him, socklen_t* len){
return ::getsockname(fd, him, len);
}
inline int os::get_host_name(char* name, int namelen){
return ::gethostname(name, namelen);
}
inline struct hostent* os::get_host_by_name(char* name) {
return ::gethostbyname(name);
}
inline int os::get_sock_opt(int fd, int level, int optname,
char* optval, socklen_t* optlen) {
return ::getsockopt(fd, level, optname, optval, optlen);
}
inline int os::set_sock_opt(int fd, int level, int optname,
const char *optval, socklen_t optlen) {
return ::setsockopt(fd, level, optname, optval, optlen);
}
#endif // OS_SOLARIS_VM_OS_SOLARIS_INLINE_HPP<|fim▁end|> | } |
<|file_name|>slide.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <nt2/memory/include/functions/slide.hpp>
#include <nt2/memory/include/functions/scalar/slide.hpp>
#include <nt2/memory/include/functions/simd/slide.hpp>
#endif<|fim▁end|> | #ifndef NT2_INCLUDE_FUNCTIONS_SLIDE_HPP_INCLUDED
#define NT2_INCLUDE_FUNCTIONS_SLIDE_HPP_INCLUDED
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
import os
from django_nyt import VERSION
from setuptools import setup, find_packages
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def get_path(fname):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), fname)
def read(fname):
return open(get_path(fname)).read()
packages = find_packages()
try:
import pypandoc
long_description = pypandoc.convert(get_path('README.md'), 'rst')
long_description = long_description.split(
'<!---Illegal PyPi RST data -->')[0]
f = open(get_path('README.rst'), 'w')
f.write(long_description)
f.close()
print("Successfully converted README.md to README.rst")
except (IOError, ImportError):
# No long description... but nevermind, it's only for PyPi uploads.
long_description = ""
setup(
name="django-nyt",
version=VERSION,
author="Benjamin Bach",
author_email="[email protected]",
url="https://github.com/benjaoming/django-nyt",
description="A pluggable notification system written for the Django framework.",
license="Apache License 2.0",
keywords="django notification system",
packages=find_packages(exclude=["testproject", "testproject.*"]),
# long_description=long_description,
zip_safe=False,
install_requires=read('requirements.txt').split("\n"),
classifiers=[
'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
include_package_data=True,
)<|fim▁end|> | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
# -*- coding: utf-8 -*- |
<|file_name|>link.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from .generic import *
from criacao.forms import *
from criacao.models import *
from gerenciamento.models import *
logger = logging.getLogger(__name__)
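# Dispatch sketch (assumption, based on the handler names and redirects below):
# GenericView routes URLs like /criacao/link/<acao>/<key>/ to the matching
# method (criar/visualizar/editar/excluir/listar), with <key> exposed as
# self.kwargs['key'].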
class LinkView(GenericView):
def criar(self, request):
if request.method == 'POST':
try:
name = request.POST['name']
url = request.POST['url']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Está faltando alguma informação, por favor, verifique os campos!',
}
}
else:
link = Link(name=name, url=url)
try:
link.save()
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-success' : 'Link criado com sucesso!',
'redirect' : '/criacao/link/listar/'
},
}
finally:
return data
else:
museu, museu_nome = UTIL_informacoes_museu()
form = LinkForm()
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'form' : form,
},
}
return data
def visualizar(self, request):
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa visualização.',
}
}
else:
museu, museu_nome = UTIL_informacoes_museu()
link = Link.objects.get(pk=pk)
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'link' : link,<|fim▁hole|> finally:
return data
def editar(self, request):
if request.method == 'POST':
try:
pk = self.kwargs['key']
name = request.POST['name']
url = request.POST['url']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar esta edição!',
}
}
else:
link = Link.objects.get(pk=pk);
link.name=name
link.url=url
link.save()
data = {
'leftover' : {
                        'alert-success' : 'Link editado com sucesso!',
'redirect' : '/criacao/link/listar/'
},
}
finally:
return data
else:
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa edição!',
}
}
else:
museu, museu_nome = UTIL_informacoes_museu()
link = Link.objects.get(pk=pk);
form = LinkForm(initial={
'name': link.name,
'url': link.url,
})
data = {
'template' : {
'request' : request,
'museu_nome' : museu_nome,
'link' : link,
'form' : form,
},
}
finally:
return data
def excluir(self, request):
try:
pk = self.kwargs['key']
except Exception, e:
logger.error(str(e))
data = {
'leftover' : {
'alert-error' : 'Não foi possível processar essa exclusão!',
}
}
else:
Link.objects.get(pk=pk).delete()
data = {
'leftover' : {
'alert-success' : 'Link deletado com sucesso!',
},
}
finally:
return data
def listar(self, request):
museu, museu_nome = UTIL_informacoes_museu()
links = Link.objects.all()
try:
page = int(self.kwargs['key'])
except:
page = 1
finally:
links = paginate(obj=links, page=page, num_per_page=8)
data = {
'template' : {
'request' : request,
'museu' : museu,
'museu_nome' : museu_nome,
'links' : links,
},
}
return data<|fim▁end|> | },
} |
<|file_name|>test_rocket1.py<|end_file_name|><|fim▁begin|>from rocketlander import RocketLander
from constants import LEFT_GROUND_CONTACT, RIGHT_GROUND_CONTACT
import numpy as np
import pyglet
if __name__ == "__main__":<|fim▁hole|> 'Starting Y-Pos Constant': 1,
'Initial Force': 'random'} # (6000, -10000)}
env = RocketLander(settings)
s = env.reset()
left_or_right_barge_movement = np.random.randint(0, 2)
for i in range(50):
a = [10.0, 1.0, 1.0]
s, r, done, info = env.step(a)
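        # Assumed action layout [main engine, side engine, nozzle angle];
        # check RocketLander.step for the authoritative ordering of the
        # 3 controls.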
# -------------------------------------
# Optional render
env.render()
# Draw the target
buffer = pyglet.image.get_buffer_manager().get_color_buffer()
image_data = buffer.get_image_data()
if i % 5 == 0:
image_data.save(filename='frames/rocket-%04d.png' % i)
env.draw_marker(env.landing_coordinates[0], env.landing_coordinates[1])
# Refresh render
env.refresh(render=False)
# When should the barge move? Water movement, dynamics etc can be simulated here.
if s[LEFT_GROUND_CONTACT] == 0 and s[RIGHT_GROUND_CONTACT] == 0:
env.move_barge_randomly(0.05, left_or_right_barge_movement)
# Random Force on rocket to simulate wind.
env.apply_random_x_disturbance \
(epsilon=0.005, \
left_or_right=left_or_right_barge_movement)
env.apply_random_y_disturbance(epsilon=0.005)
# Touch down or pass abs(THETA_LIMIT)
if done: break<|fim▁end|> | # Settings holds all the settings for the rocket lander environment.
settings = {'Side Engines': True,
'Clouds': True,
'Vectorized Nozzle': True, |
<|file_name|>test_JMXDataSource.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# Copyright (C) Zenoss, Inc. 2015, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################
<|fim▁hole|>from Products.ZenTestCase.BaseTestCase import BaseTestCase
from ZenPacks.zenoss.ZenJMX.datasources.JMXDataSource import JMXDataSource
class TestJMXDataSource(BaseTestCase):
def afterSetUp(self):
self.ds = JMXDataSource(id='1')
def test_getDescription(self):
self.assertEqual(self.ds.getDescription(), '${dev/id}')
def test_getProtocols(self):
self.assertEqual(self.ds.getProtocols(), ['REMOTING-JMX', 'RMI', 'JMXMP'])
def test_zmanage_editProperties(self):
with self.assertRaises(AttributeError):
self.ds.zmanage_editProperties()<|fim▁end|> | |
<|file_name|>InputTransformer.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatchevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains the parameters needed for you to provide custom input to a target based on one or more pieces of data
* extracted from the event.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/events-2015-10-07/InputTransformer" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InputTransformer implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*/
private java.util.Map<String, String> inputPathsMap;
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*/
private String inputTemplate;
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @return Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
*/
public java.util.Map<String, String> getInputPathsMap() {
return inputPathsMap;
}
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @param inputPathsMap
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
*/
public void setInputPathsMap(java.util.Map<String, String> inputPathsMap) {
this.inputPathsMap = inputPathsMap;
}
/**
* <p>
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON path.
* </p>
*
* @param inputPathsMap
* Map of JSON paths to be extracted from the event. These are key-value pairs, where each value is a JSON
* path.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer withInputPathsMap(java.util.Map<String, String> inputPathsMap) {
setInputPathsMap(inputPathsMap);
return this;
}
public InputTransformer addInputPathsMapEntry(String key, String value) {
if (null == this.inputPathsMap) {
this.inputPathsMap = new java.util.HashMap<String, String>();
}
if (this.inputPathsMap.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.inputPathsMap.put(key, value);
return this;
}
/**
* Removes all the entries added into InputPathsMap.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer clearInputPathsMapEntries() {
this.inputPathsMap = null;
return this;
}
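    // Illustrative usage (hypothetical event fields): extract a value from
    // the matched event and splice it into the text delivered to the target:
    //   InputTransformer t = new InputTransformer()
    //       .addInputPathsMapEntry("instance", "$.detail.instance-id")
    //       .withInputTemplate("\"instance <instance> changed state\"");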
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @param inputTemplate
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
*/
public void setInputTemplate(String inputTemplate) {
this.inputTemplate = inputTemplate;
}
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @return Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
*/
public String getInputTemplate() {
return this.inputTemplate;
}
/**
* <p>
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the data
* sent to the target.
* </p>
*
* @param inputTemplate
* Input template where you can use the values of the keys from <code>InputPathsMap</code> to customize the
* data sent to the target.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public InputTransformer withInputTemplate(String inputTemplate) {
setInputTemplate(inputTemplate);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getInputPathsMap() != null)
sb.append("InputPathsMap: ").append(getInputPathsMap()).append(",");
if (getInputTemplate() != null)
sb.append("InputTemplate: ").append(getInputTemplate());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;<|fim▁hole|> if (other.getInputPathsMap() == null ^ this.getInputPathsMap() == null)
return false;
if (other.getInputPathsMap() != null && other.getInputPathsMap().equals(this.getInputPathsMap()) == false)
return false;
if (other.getInputTemplate() == null ^ this.getInputTemplate() == null)
return false;
if (other.getInputTemplate() != null && other.getInputTemplate().equals(this.getInputTemplate()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getInputPathsMap() == null) ? 0 : getInputPathsMap().hashCode());
hashCode = prime * hashCode + ((getInputTemplate() == null) ? 0 : getInputTemplate().hashCode());
return hashCode;
}
@Override
public InputTransformer clone() {
try {
return (InputTransformer) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.cloudwatchevents.model.transform.InputTransformerMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}<|fim▁end|> |
if (obj instanceof InputTransformer == false)
return false;
InputTransformer other = (InputTransformer) obj; |
<|file_name|>TriAppSkeleton.cpp<|end_file_name|><|fim▁begin|>// TriAppSkeleton.cpp
#ifdef _WIN32
# define WINDOWS_LEAN_AND_MEAN
# define NOMINMAX
# include <windows.h>
#endif
#include <GL/glew.h>
#include "AppSkeleton.h"
#include "GL/GLUtils.h"
#include "GL/ShaderFunctions.h"
#include "MatrixMath.h"
#include "utils/Logger.h"
#include "paramgl.h"
#include "TriAppSkeleton.h"
TriAppSkeleton::TriAppSkeleton()
: AppSkeleton()
, which_button(-1)
{
g_cameraLocation = make_float3(0.0f, -3.0f, 0.0f);
g_lookVector = make_float3(0.0f, 1.0f, 0.0f);
g_upVector = make_float3(0.0f, 0.0f, 1.0f);
g_rotation = make_float3(0.0f, 90.0f, 0.0f);
g_distance = 0.0f;
g_viewAngle = 45.0;
which_button = -1;
modifier_mode = 0;
g_progBasic = 0;
g_progOverlay = 0;
}
TriAppSkeleton::~TriAppSkeleton()
{
}
bool TriAppSkeleton::initGL(int argc, char **argv)
{
bool ret = AppSkeleton::initGL(argc, argv); /// calls _InitShaders
return ret;
}
/// Shaders must be created *after* the OpenGL context is created.
/// This virtual function will be called by AppSkeleton::initGL.
void TriAppSkeleton::_InitShaders()
{
// InitShaders()
LOG_INFO("Initializing shaders.");
{
g_progBasic = makeShaderByName("basic");
g_progOverlay = makeShaderByName("overlay");
}
}
void TriAppSkeleton::drawObject() const
{
GLfloat vVertices[] = { 0.0f, 0.5f, 0.0f,
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f };
GLfloat vColors[] = { 1.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 1.0f };
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vVertices);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, vColors);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
CHECK_GL_ERROR_MACRO();
}
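// Note: drawObject() assumes the "basic" shader binds locations 0 and 1,
// e.g. (GLSL sketch):
//   layout(location = 0) in vec3 vPosition;
//   layout(location = 1) in vec3 vColor;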
void TriAppSkeleton::display() const
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
glUseProgram(g_progBasic);
{
/// Set up our 3D transformation matrices
float modelviewMatrix[16];
float projectionMatrix[16];
float3 origin = g_cameraLocation + g_lookVector;
glhLookAtf2(modelviewMatrix,
g_cameraLocation,
origin,
g_upVector);
glhTranslate(modelviewMatrix, 0.0f, g_distance, 0.0f);
glhRotate(modelviewMatrix, g_rotation.x, 0.0f, 0.0f, 1.0f);
glhRotate(modelviewMatrix, g_rotation.y, 1.0f, 0.0f, 0.0f);
glhPerspectivef2(projectionMatrix, g_viewAngle, (float)g_windowWidth/(float)g_windowHeight, 0.004, 500.0);
glUniformMatrix4fv(getUniLoc(g_progBasic, "mvmtx"), 1, false, modelviewMatrix);
glUniformMatrix4fv(getUniLoc(g_progBasic, "prmtx"), 1, false, projectionMatrix);
drawObject();
}
glUseProgram(0);
CHECK_GL_ERROR_MACRO();
}
void TriAppSkeleton::mouseDown(int button, int state, int x, int y)
{
which_button = button;
oldx = newx = x;
oldy = newy = y;
if (state == 0) // 0 == GLFW_RELEASE<|fim▁hole|>
void TriAppSkeleton::mouseMove(int x, int y)
{
int mmx, mmy;
float thresh = 4;
if (modifier_mode & GLUT_ACTIVE_SHIFT) { thresh /= 0.01f; }
oldx = newx;
oldy = newy;
newx = x;
newy = y;
mmx = x-oldx;
mmy = y-oldy;
if (which_button == GLUT_LEFT_BUTTON) //GLFW_MOUSE_BUTTON_1
{
g_rotation.x += (float)mmx/thresh;
g_rotation.y += (float)mmy/thresh;
}
else if (which_button == GLUT_RIGHT_BUTTON)
{
g_distance += (float)mmx/thresh;
}
}
void TriAppSkeleton::keyboard(int key, int x, int y)
{
AppSkeleton::keyboard(key, x, y);
}<|fim▁end|> | {
which_button = -1;
}
} |
<|file_name|>systemviews.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import importlib
from django.apps import apps
def get_seo_views(metadata_class):
return get_view_names(metadata_class._meta.seo_views)
def get_view_names(seo_views):
output = []
for name in seo_views:
try:
app = apps.get_app_config(name).models_module
        except LookupError:
output.append(name)
else:
app_name = app.__name__.split(".")[:-1]
app_name.append("urls")
try:
urls = importlib.import_module(".".join(app_name)).urlpatterns
except (ImportError, AttributeError):
output.append(name)<|fim▁hole|> if getattr(url, 'name', None):
output.append(url.name)
return output<|fim▁end|> | else:
for url in urls: |
<|file_name|>tests.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>};<|fim▁end|> |
exports.fixShould = function fixShould(str) {
var segs = str.split('var should = require(\'should\');');
return segs.join('require(\'should\');'); |
<|file_name|>TestDatastream.py<|end_file_name|><|fim▁begin|>import os
import unittest
import random
import xmlrunner
# NOTE: the client, schema and exception-handler imports below are assumed
# from the falkonryclient package; exact module paths may vary by version.
from falkonryclient import client as FClient
from falkonryclient import schemas as Schemas
from falkonryclient.helper.utils import exception_handler
host = os.environ['FALKONRY_HOST_URL'] # host url
token = os.environ['FALKONRY_TOKEN'] # auth token
class TestDatastream(unittest.TestCase):
def setUp(self):
self.created_datastreams = []
        self.fclient = FClient(host=host, token=token, options=None)
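    def tearDown(self):
        # Clean up datastreams created during the run; assumes
        # delete_datastream (used in the deletion test below) is safe to
        # call for each collected id.
        for ds_id in self.created_datastreams:
            try:
                self.fclient.delete_datastream(ds_id)
            except Exception as e:
                print(exception_handler(e))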
# Create datastream without any signals
def test_create_standalone_datastream(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a single entity
def test_create_datastream_narrow_style_single_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_valueIdentifier("value")
signal.set_signalIdentifier("signal")
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a multiple entities
def test_create_datastream_narrow_style_multiple_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_signalIdentifier("signal") # set signal identifier
signal.set_valueIdentifier("value") # set value identifier
field.set_entityIdentifier("entity") # set entity identifier
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
self.assertEqual(signalResponse.get_signalIdentifier(), signal.get_signalIdentifier(), 'Invalid signal identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for wide style data from a single entity
def test_create_datastream_wide_style_single_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
inputResp1 = inputs.__getitem__(0)
inputResp2 = inputs.__getitem__(1)
inputResp3 = inputs.__getitem__(2)
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for wide style data from a multiple entities
def test_create_datastream_wide_style_multiple_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
field.set_entityIdentifier("entity")
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
inputResp1 = inputs.__getitem__(0)
inputResp2 = inputs.__getitem__(1)
inputResp3 = inputs.__getitem__(2)
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Retrieve Datastreams
def test_get_datastream_list(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# get datastream list
datastreamList = self.fclient.get_datastreams()
self.assertEqual(isinstance(datastreamList, list), True, 'Invalid datastreamlist in response')
self.assertEqual(len(datastreamList) > 0, True, 'No datastreams in get response')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Retrieve Datastream by Id
def test_get_datastream_by_id(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")<|fim▁hole|> field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# get datastream by id
datastreamResp = self.fclient.get_datastream(response.get_id())
self.assertEqual(isinstance(datastreamResp, Schemas.Datastream), True, 'Invalid datastream object after creation')
self.assertEqual(response.get_id(), datastreamResp.get_id(), 'Invalid id of datastream after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Delete Datastream
def test_delete_datastream_by_id(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# delete datastream
try:
self.fclient.delete_datastream(response.get_id())
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot delete datastream')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream microseconds precision
def test_create_datastream_micro_second_precision(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datastream.set_time_precision('micro') # set 'micro' for microseconds precision
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
signal.set_signalIdentifier("signal")
signal.set_valueIdentifier("value")
field.set_entityIdentifier("entity")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_signalIdentifier(), "signal", 'Invalid signal identifier object after creation')
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
self.assertEqual(response.get_time_precision(), datastream.get_time_precision(), 'Invalid time precision after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for batch identifier
def test_create_datastream_with_batch_identifier(self):
fclient = FClient(host=host, token=token,options=None)
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
field.set_batchIdentifier("batch") # set batchIdentifier in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
self.assertEqual(fieldResponse.get_batchIdentifier(),"batch",'Invalid batchIdentifier after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
inputResp1 = inputs[0]
inputResp2 = inputs[1]
inputResp3 = inputs[2]
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
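# The tests append the ids of the datastreams they create to
# self.created_datastreams so the teardown below can delete them even if an
# assertion fails midway through a test.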
def tearDown(self): # teardown
for ds in self.created_datastreams:
try:
self.fclient.delete_datastream(ds)
except Exception as e:
print(exception_handler(e))
pass
if __name__ == '__main__':
if __package__ is None:
import sys
from os import path
sys.path.append(
path.dirname(
path.dirname(
path.abspath(__file__)
)
)
)
from falkonryclient import schemas as Schemas
from falkonryclient import client as FClient
from falkonryclient.helper.utils import exception_handler
else:
from ..falkonryclient import schemas as Schemas
from ..falkonryclient import client as FClient
from ..falkonryclient.helper.utils import exception_handler
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output='out'),
failfast=False, buffer=False, catchbreak=False)
else:
from falkonryclient import schemas as Schemas
from falkonryclient import client as FClient
from falkonryclient.helper.utils import exception_handler<|fim▁end|> | time.set_identifier("time")
time.set_format("iso_8601")
signal.set_signalIdentifier("signal")
signal.set_valueIdentifier("value") |
<|file_name|>input.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use script_traits::MouseButton;
use std::path::Path;
use std::mem::size_of;
use std::mem::transmute;
use std::mem::zeroed;
use std::os::unix::io::AsRawFd;
use std::fs::File;
use std::thread;
use std::sync::mpsc::Sender;
use std::io::Read;
use geom::point::Point2D;
use errno::errno;
use libc::c_int;
use libc::c_long;
use libc::time_t;
use compositing::windowing::WindowEvent;
use compositing::windowing::MouseWindowEvent;
extern {
// XXX: no variadic form in std libs?
fn ioctl(fd: c_int, req: c_int, ...) -> c_int;
}
#[repr(C)]
struct linux_input_event {
sec: time_t,
msec: c_long,
evt_type: u16,
code: u16,
value: i32,
}
#[repr(C)]
struct linux_input_absinfo {
value: i32,
minimum: i32,
maximum: i32,
fuzz: i32,
flat: i32,
resolution: i32,
}
const IOC_NONE: c_int = 0;
const IOC_WRITE: c_int = 1;
const IOC_READ: c_int = 2;
fn ioc(dir: c_int, ioctype: c_int, nr: c_int, size: c_int) -> c_int {
dir << 30 | size << 16 | ioctype << 8 | nr
}
fn ev_ioc_g_abs(abs: u16) -> c_int {
ioc(IOC_READ, 'E' as c_int, (0x40 + abs) as i32, size_of::<linux_input_absinfo>() as i32)
}
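// Worked example (illustrative, not part of the original source): for
// ABS_MT_POSITION_X (0x35) this builds the kernel's EVIOCGABS(0x35) request:
// dir = IOC_READ (2) in bits 31..30, size = 24 (six i32 fields) in bits
// 29..16, ioctype = 'E' (0x45) in bits 15..8 and nr = 0x40 + 0x35 = 0x75 in
// bits 7..0, i.e. 2 << 30 | 24 << 16 | 0x45 << 8 | 0x75 == 0x8018_4575.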
const EV_SYN: u16 = 0;
const EV_ABS: u16 = 3;
const EV_REPORT: u16 = 0;
const ABS_MT_SLOT: u16 = 0x2F;
const ABS_MT_TOUCH_MAJOR: u16 = 0x30;
const ABS_MT_TOUCH_MINOR: u16 = 0x31;
const ABS_MT_WIDTH_MAJOR: u16 = 0x32;
const ABS_MT_WIDTH_MINOR: u16 = 0x33;
const ABS_MT_ORIENTATION: u16 = 0x34;
const ABS_MT_POSITION_X: u16 = 0x35;
const ABS_MT_POSITION_Y: u16 = 0x36;
const ABS_MT_TRACKING_ID: u16 = 0x39;
struct InputSlot {
tracking_id: i32,
x: i32,
y: i32,
}
fn dist(x1: i32, x2: i32, y1: i32, y2: i32) -> f32 {
let deltaX = (x2 - x1) as f32;
let deltaY = (y2 - y1) as f32;
(deltaX * deltaX + deltaY * deltaY).sqrt()
}
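// For example, dist(0, 3, 0, 4) == 5.0 (a 3-4-5 right triangle); below,
// this Euclidean distance between two touch slots drives the pinch-zoom
// factor.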
fn read_input_device(device_path: &Path,
sender: &Sender<WindowEvent>) {
let mut device = match File::open(device_path) {
Ok(dev) => dev,
Err(e) => {
println!("Couldn't open device! {}", e);
return;
},
};
let fd = device.as_raw_fd();
let mut x_info: linux_input_absinfo = unsafe { zeroed() };
let mut y_info: linux_input_absinfo = unsafe { zeroed() };
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_X), &mut x_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_X info {} {}", ret, errno());
}
}
unsafe {
let ret = ioctl(fd, ev_ioc_g_abs(ABS_MT_POSITION_Y), &mut y_info);
if ret < 0 {
println!("Couldn't get ABS_MT_POSITION_Y info {} {}", ret, errno());
}
}
let touchWidth = x_info.maximum - x_info.minimum;
let touchHeight = y_info.maximum - y_info.minimum;
println!("xMin: {}, yMin: {}, touchWidth: {}, touchHeight: {}",
x_info.minimum, y_info.minimum, touchWidth, touchHeight);
// XXX: Why isn't size_of treated as constant?
// let buf: [u8; (16 * size_of::<linux_input_event>())];
let mut buf: [u8; (16 * 16)] = unsafe { zeroed() };
let mut slots: [InputSlot; 10] = unsafe { zeroed() };
for slot in slots.iter_mut() {
slot.tracking_id = -1;
}
let mut last_x = 0;
let mut last_y = 0;
let mut first_x = 0;
let mut first_y = 0;
let mut last_dist: f32 = 0f32;
let mut touch_count: i32 = 0;
let mut current_slot: usize = 0;
// XXX: Need to use the real dimensions of the screen
let screen_dist = dist(0, 480, 854, 0);
loop {
let read = match device.read(&mut buf) {
Ok(count) => {
assert!(count % size_of::<linux_input_event>() == 0,
"Unexpected input device read length!");
count
},
Err(e) => {
println!("Couldn't read device! {}", e);
return;
}
};
let count = read / size_of::<linux_input_event>();
let events: *mut linux_input_event = unsafe { transmute(buf.as_mut_ptr()) };
let mut tracking_updated = false;
for idx in 0..(count as isize) {
let event: &linux_input_event = unsafe { transmute(events.offset(idx)) };
match (event.evt_type, event.code) {
(EV_SYN, EV_REPORT) => {
let slotA = &slots[0];
if tracking_updated {
tracking_updated = false;
if slotA.tracking_id == -1 {
println!("Touch up");
let delta_x = slotA.x - first_x;
let delta_y = slotA.y - first_y;
let dist = delta_x * delta_x + delta_y * delta_y;
if dist < 16 {
let click_pt = Point2D::typed(slotA.x as f32, slotA.y as f32);
println!("Dispatching click!");<|fim▁hole|> WindowEvent::MouseWindowEventClass(
MouseWindowEvent::MouseUp(MouseButton::Left, click_pt))).ok().unwrap();
sender.send(
WindowEvent::MouseWindowEventClass(
MouseWindowEvent::Click(MouseButton::Left, click_pt))).ok().unwrap();
}
} else {
println!("Touch down");
last_x = slotA.x;
last_y = slotA.y;
first_x = slotA.x;
first_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
last_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
}
}
} else {
println!("Touch move x: {}, y: {}", slotA.x, slotA.y);
sender.send(
WindowEvent::Scroll(Point2D::typed((slotA.x - last_x) as f32, (slotA.y - last_y) as f32),
Point2D::typed(slotA.x, slotA.y))).ok().unwrap();
last_x = slotA.x;
last_y = slotA.y;
if touch_count >= 2 {
let slotB = &slots[1];
let cur_dist = dist(slotA.x, slotB.x, slotA.y, slotB.y);
println!("Zooming {} {} {} {}",
cur_dist, last_dist, screen_dist,
((screen_dist + (cur_dist - last_dist))/screen_dist));
sender.send(
WindowEvent::Zoom((screen_dist + (cur_dist - last_dist))/screen_dist)).ok().unwrap();
last_dist = cur_dist;
}
}
},
(EV_SYN, _) => println!("Unknown SYN code {}", event.code),
(EV_ABS, ABS_MT_SLOT) => {
if (event.value as usize) < slots.len() {
current_slot = event.value as usize;
} else {
println!("Invalid slot! {}", event.value);
}
},
(EV_ABS, ABS_MT_TOUCH_MAJOR) => (),
(EV_ABS, ABS_MT_TOUCH_MINOR) => (),
(EV_ABS, ABS_MT_WIDTH_MAJOR) => (),
(EV_ABS, ABS_MT_WIDTH_MINOR) => (),
(EV_ABS, ABS_MT_ORIENTATION) => (),
(EV_ABS, ABS_MT_POSITION_X) => {
slots[current_slot].x = event.value - x_info.minimum;
},
(EV_ABS, ABS_MT_POSITION_Y) => {
slots[current_slot].y = event.value - y_info.minimum;
},
(EV_ABS, ABS_MT_TRACKING_ID) => {
let current_id = slots[current_slot].tracking_id;
if current_id != event.value &&
(current_id == -1 || event.value == -1) {
tracking_updated = true;
if event.value == -1 {
touch_count -= 1;
} else {
touch_count += 1;
}
}
slots[current_slot].tracking_id = event.value;
},
(EV_ABS, _) => println!("Unknown ABS code {}", event.code),
(_, _) => println!("Unknown event type {}", event.evt_type),
}
}
}
}
pub fn run_input_loop(event_sender: &Sender<WindowEvent>) {
let sender = event_sender.clone();
thread::spawn(move || {
// XXX need to scan all devices and read every one.
let touchinputdev = Path::new("/dev/input/event0");
read_input_device(&touchinputdev, &sender);
});
}<|fim▁end|> | sender.send(
WindowEvent::MouseWindowEventClass(
MouseWindowEvent::MouseDown(MouseButton::Left, click_pt))).ok().unwrap();
sender.send( |
<|file_name|>FaceDetection+BackgroundReduction.py<|end_file_name|><|fim▁begin|>import numpy as np
import cv2
from matplotlib import pyplot as plt
face_cascade = cv2.CascadeClassifier('/home/tianyiz/user/601project/c/haarcascade_frontalface_alt.xml')
cap = cv2.VideoCapture(0)
fgbg = cv2.createBackgroundSubtractorMOG2()
while 1:
ret, img = cap.read()
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
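# detectMultiScale arguments: 1.3 is the image-pyramid scale factor and 5 is
# minNeighbors; raising either yields fewer but more confident detections.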
# Background reduction
fgmask = fgbg.apply(img)
cv2.imshow('Reduce',fgmask)
for (x,y,w,h) in faces:
print(x,y,w,h)
cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2) <|fim▁hole|> k = cv2.waitKey(30) & 0xff
if k == 27:
break
cap.release()
cv2.destroyAllWindows()<|fim▁end|> | roi_gray = gray[y:y+h, x:x+w]
roi_color = img[y:y+h, x:x+w]
cv2.imshow('img',img) |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.example.nm_gql_go_link_example;
import io.flutter.embedding.android.FlutterActivity;<|fim▁hole|><|fim▁end|> |
public class MainActivity extends FlutterActivity {
} |
<|file_name|>NamedThreadFactory.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2011 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.mogwee.executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Factory that sets the name of each thread it creates to {@code [name]-[id]}.
* This makes debugging stack traces much easier.
*/
public class NamedThreadFactory implements ThreadFactory
{
private final AtomicInteger count = new AtomicInteger(0);
private final String name;
public NamedThreadFactory(String name)
{
this.name = name;
}
@Override
public Thread newThread(final Runnable runnable)
{
Thread thread = new Thread(runnable);
thread.setName(name + "-" + count.incrementAndGet());
return thread;
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>config_handler.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
use crate::CoreError;
use directories::ProjectDirs;
use quic_p2p::Config as QuicP2pConfig;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
#[cfg(test)]
use std::fs;
use std::{
ffi::OsStr,
fs::File,
io::{self, BufReader},
path::PathBuf,
sync::Mutex,
};
const CONFIG_DIR_QUALIFIER: &str = "net";
const CONFIG_DIR_ORGANISATION: &str = "MaidSafe";
const CONFIG_DIR_APPLICATION: &str = "safe_core";
const CONFIG_FILE: &str = "safe_core.config";
const VAULT_CONFIG_DIR_APPLICATION: &str = "safe_vault";
const VAULT_CONNECTION_INFO_FILE: &str = "vault_connection_info.config";
lazy_static! {
static ref CONFIG_DIR_PATH: Mutex<Option<PathBuf>> = Mutex::new(None);
static ref DEFAULT_SAFE_CORE_PROJECT_DIRS: Option<ProjectDirs> = ProjectDirs::from(
CONFIG_DIR_QUALIFIER,
CONFIG_DIR_ORGANISATION,
CONFIG_DIR_APPLICATION,
);
static ref DEFAULT_VAULT_PROJECT_DIRS: Option<ProjectDirs> = ProjectDirs::from(
CONFIG_DIR_QUALIFIER,
CONFIG_DIR_ORGANISATION,
VAULT_CONFIG_DIR_APPLICATION,
);
}<|fim▁hole|>// `OsStr` is platform-native.
pub fn set_config_dir_path<P: AsRef<OsStr> + ?Sized>(path: &P) {
*unwrap!(CONFIG_DIR_PATH.lock()) = Some(From::from(path));
}
/// Configuration for safe-core.
#[derive(Clone, Debug, Default, Deserialize, Serialize, Eq, PartialEq)]
pub struct Config {
/// QuicP2p options.
pub quic_p2p: QuicP2pConfig,
/// Developer options.
pub dev: Option<DevConfig>,
}
#[cfg(any(target_os = "android", target_os = "androideabi", target_os = "ios"))]
fn check_config_path_set() -> Result<(), CoreError> {
if unwrap!(CONFIG_DIR_PATH.lock()).is_none() {
Err(CoreError::QuicP2p(quic_p2p::Error::Configuration(
"Boostrap cache directory not set".to_string(),
)))
} else {
Ok(())
}
}
impl Config {
/// Returns a new `Config` instance. Tries to read quic-p2p config from file.
pub fn new() -> Self {
let quic_p2p = Self::read_qp2p_from_file().unwrap_or_default();
Self {
quic_p2p,
dev: None,
}
}
fn read_qp2p_from_file() -> Result<QuicP2pConfig, CoreError> {
// First we read the default configuration file, and use a slightly modified default config
// if there is none.
let mut config: QuicP2pConfig = {
match read_config_file(dirs()?, CONFIG_FILE) {
Err(CoreError::IoError(ref err)) if err.kind() == io::ErrorKind::NotFound => {
// Bootstrap cache dir must be set on mobile platforms
// using set_config_dir_path
#[cfg(any(
target_os = "android",
target_os = "androideabi",
target_os = "ios"
))]
check_config_path_set()?;
let custom_dir =
if let Some(custom_path) = unwrap!(CONFIG_DIR_PATH.lock()).clone() {
Some(custom_path.into_os_string().into_string().map_err(|_| {
CoreError::from("Config path is not a valid UTF-8 string")
})?)
} else {
None
};
// If there is no config file, assume we are a client
QuicP2pConfig {
our_type: quic_p2p::OurType::Client,
bootstrap_cache_dir: custom_dir,
..Default::default()
}
}
result => result?,
}
};
// Then if there is a locally running Vault we add it to the list of know contacts.
if let Ok(node_info) = read_config_file(vault_dirs()?, VAULT_CONNECTION_INFO_FILE) {
let _ = config.hard_coded_contacts.insert(node_info);
}
Ok(config)
}
}
/// Extra configuration options intended for developers.
#[derive(Clone, Debug, Default, Deserialize, Serialize, Eq, PartialEq)]
pub struct DevConfig {
/// Switch off mutations limit in mock-vault.
pub mock_unlimited_coins: bool,
/// Use memory store instead of file store in mock-vault.
pub mock_in_memory_storage: bool,
/// Set the mock-vault path if using file store (`mock_in_memory_storage` is `false`).
pub mock_vault_path: Option<String>,
}
/// Reads the `safe_core` config file and returns it or a default if this fails.
pub fn get_config() -> Config {
Config::new()
}
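// Example usage (illustrative):
//   let config = get_config();
//   if let Some(dev) = &config.dev {
//       // developer options such as dev.mock_in_memory_storage are available
//   }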
/// Returns the directory from which the config files are read
pub fn config_dir() -> Result<PathBuf, CoreError> {
Ok(dirs()?.config_dir().to_path_buf())
}
fn dirs() -> Result<ProjectDirs, CoreError> {
let project_dirs = if let Some(custom_path) = unwrap!(CONFIG_DIR_PATH.lock()).clone() {
ProjectDirs::from_path(custom_path)
} else {
DEFAULT_SAFE_CORE_PROJECT_DIRS.clone()
};
project_dirs.ok_or_else(|| CoreError::from("Cannot determine project directory paths"))
}
fn vault_dirs() -> Result<ProjectDirs, CoreError> {
let project_dirs = if let Some(custom_path) = unwrap!(CONFIG_DIR_PATH.lock()).clone() {
ProjectDirs::from_path(custom_path)
} else {
DEFAULT_VAULT_PROJECT_DIRS.clone()
};
project_dirs.ok_or_else(|| CoreError::from("Cannot determine vault directory paths"))
}
fn read_config_file<T>(dirs: ProjectDirs, file: &str) -> Result<T, CoreError>
where
T: DeserializeOwned,
{
let path = dirs.config_dir().join(file);
let file = match File::open(&path) {
Ok(file) => {
trace!("Reading: {}", path.display());
file
}
Err(error) => {
trace!("Not available: {}", path.display());
return Err(error.into());
}
};
let reader = BufReader::new(file);
serde_json::from_reader(reader).map_err(|err| {
info!("Could not parse: {} ({:?})", err, err);
err.into()
})
}
/// Writes a `safe_core` config file **for use by tests and examples**.
///
/// N.B. This method should only be used as a utility for test and examples. In normal use cases,
/// the config file should be created by the Vault's installer.
#[cfg(test)]
pub fn write_config_file(config: &Config) -> Result<PathBuf, CoreError> {
let dir = config_dir()?;
fs::create_dir_all(dir.clone())?;
let path = dir.join(CONFIG_FILE);
dbg!(&path);
let mut file = File::create(&path)?;
serde_json::to_writer_pretty(&mut file, config)?;
file.sync_all()?;
Ok(path)
}
#[cfg(all(test, feature = "mock-network"))]
mod test {
use super::*;
use std::env::temp_dir;
// 1. Write the default config file to temp directory.
// 2. Set the temp directory as the custom config directory path.
// 3. Assert that `Config::new()` reads the default config written to disk.
// 4. Verify that `Config::new()` generates the correct default config.
// The default config will have the custom config path in the
// `boostrap_cache_dir` field and `our_type` will be set to `Client`
#[test]
fn custom_config_path() {
let path = temp_dir();
let temp_dir_path = path.clone();
set_config_dir_path(&path);
// In the default config, `our_type` will be set to Node.
let config: Config = Default::default();
unwrap!(write_config_file(&config));
let read_cfg = Config::new();
assert_eq!(config, read_cfg);
let mut path = unwrap!(ProjectDirs::from_path(temp_dir_path.clone()))
.config_dir()
.to_path_buf();
path.push(CONFIG_FILE);
unwrap!(std::fs::remove_file(path));
// In the absence of a config file, the config handler
// should initialize the `our_type` field to Client.
let config = Config::new();
let expected_config = Config {
quic_p2p: QuicP2pConfig {
our_type: quic_p2p::OurType::Client,
bootstrap_cache_dir: Some(unwrap!(temp_dir_path.into_os_string().into_string())),
..Default::default()
},
..Default::default()
};
assert_eq!(config, expected_config);
}
}<|fim▁end|> |
/// Set a custom path for the config files. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from delta_variance import DeltaVariance, DeltaVariance_Distance |
<|file_name|>TextToPDF.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.tools;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType0Font;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
/**
* This will take a text file and output a PDF with that text.
*
* @author Ben Litchfield
*/
public class TextToPDF
{
/**<|fim▁hole|> * The scaling factor for font units to PDF units
*/
private static final int FONTSCALE = 1000;
/**
* The default font
*/
private static final PDType1Font DEFAULT_FONT = PDType1Font.HELVETICA;
/**
* The default font size
*/
private static final int DEFAULT_FONT_SIZE = 10;
/**
* The line height as a factor of the font size
*/
private static final float LINE_HEIGHT_FACTOR = 1.05f;
private int fontSize = DEFAULT_FONT_SIZE;
private PDFont font = DEFAULT_FONT;
private static final Map<String, PDType1Font> STANDARD_14 = new HashMap<String, PDType1Font>();
static
{
STANDARD_14.put(PDType1Font.TIMES_ROMAN.getBaseFont(), PDType1Font.TIMES_ROMAN);
STANDARD_14.put(PDType1Font.TIMES_BOLD.getBaseFont(), PDType1Font.TIMES_BOLD);
STANDARD_14.put(PDType1Font.TIMES_ITALIC.getBaseFont(), PDType1Font.TIMES_ITALIC);
STANDARD_14.put(PDType1Font.TIMES_BOLD_ITALIC.getBaseFont(), PDType1Font.TIMES_BOLD_ITALIC);
STANDARD_14.put(PDType1Font.HELVETICA.getBaseFont(), PDType1Font.HELVETICA);
STANDARD_14.put(PDType1Font.HELVETICA_BOLD.getBaseFont(), PDType1Font.HELVETICA_BOLD);
STANDARD_14.put(PDType1Font.HELVETICA_OBLIQUE.getBaseFont(), PDType1Font.HELVETICA_OBLIQUE);
STANDARD_14.put(PDType1Font.HELVETICA_BOLD_OBLIQUE.getBaseFont(), PDType1Font.HELVETICA_BOLD_OBLIQUE);
STANDARD_14.put(PDType1Font.COURIER.getBaseFont(), PDType1Font.COURIER);
STANDARD_14.put(PDType1Font.COURIER_BOLD.getBaseFont(), PDType1Font.COURIER_BOLD);
STANDARD_14.put(PDType1Font.COURIER_OBLIQUE.getBaseFont(), PDType1Font.COURIER_OBLIQUE);
STANDARD_14.put(PDType1Font.COURIER_BOLD_OBLIQUE.getBaseFont(), PDType1Font.COURIER_BOLD_OBLIQUE);
STANDARD_14.put(PDType1Font.SYMBOL.getBaseFont(), PDType1Font.SYMBOL);
STANDARD_14.put(PDType1Font.ZAPF_DINGBATS.getBaseFont(), PDType1Font.ZAPF_DINGBATS);
}
/**
* Create a PDF document with some text.
*
* @param text The stream of text data.
*
* @return The document with the text in it.
*
* @throws IOException If there is an error writing the data.
*/
public PDDocument createPDFFromText( Reader text ) throws IOException
{
PDDocument doc = new PDDocument();
createPDFFromText(doc, text);
return doc;
}
/**
* Create a PDF document with some text.
*
* @param text The stream of text data.
*
* @throws IOException If there is an error writing the data.
*/
public void createPDFFromText( PDDocument doc, Reader text ) throws IOException
{
try
{
final int margin = 40;
float height = font.getBoundingBox().getHeight() / FONTSCALE;
//calculate font height and increase by a factor.
height = height*fontSize*LINE_HEIGHT_FACTOR;
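// Worked example (illustrative): Helvetica's bounding box is roughly 1156
// glyph units tall, so at the default 10pt size each line advances about
// 1156 / 1000 * 10 * 1.05 ≈ 12.1 points.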
BufferedReader data = new BufferedReader( text );
String nextLine = null;
PDPage page = new PDPage();
PDPageContentStream contentStream = null;
float y = -1;
float maxStringLength = page.getMediaBox().getWidth() - 2*margin;
// There is a special case of creating a PDF document from an empty string.
boolean textIsEmpty = true;
while( (nextLine = data.readLine()) != null )
{
// The input text is non-empty. New pages will be created and added
// to the PDF document as they are needed, depending on the length of
// the text.
textIsEmpty = false;
String[] lineWords = nextLine.trim().split( " " );
int lineIndex = 0;
while( lineIndex < lineWords.length )
{
StringBuilder nextLineToDraw = new StringBuilder();
float lengthIfUsingNextWord = 0;
do
{
nextLineToDraw.append( lineWords[lineIndex] );
nextLineToDraw.append( " " );
lineIndex++;
if( lineIndex < lineWords.length )
{
String lineWithNextWord = nextLineToDraw.toString() + lineWords[lineIndex];
lengthIfUsingNextWord =
(font.getStringWidth( lineWithNextWord )/FONTSCALE) * fontSize;
}
}
while( lineIndex < lineWords.length &&
lengthIfUsingNextWord < maxStringLength );
if( y < margin )
{
// We have crossed the end-of-page boundary and need to extend the
// document by another page.
page = new PDPage();
doc.addPage( page );
if( contentStream != null )
{
contentStream.endText();
contentStream.close();
}
contentStream = new PDPageContentStream(doc, page);
contentStream.setFont( font, fontSize );
contentStream.beginText();
y = page.getMediaBox().getHeight() - margin + height;
contentStream.newLineAtOffset(
margin, y);
}
if( contentStream == null )
{
throw new IOException( "Error:Expected non-null content stream." );
}
contentStream.newLineAtOffset(0, -height);
y -= height;
contentStream.showText(nextLineToDraw.toString());
}
}
// If the input text was the empty string, then the above while loop will have short-circuited
// and we will not have added any PDPages to the document.
// So in order to make the resultant PDF document readable by Adobe Reader etc, we'll add an empty page.
if (textIsEmpty)
{
doc.addPage(page);
}
if( contentStream != null )
{
contentStream.endText();
contentStream.close();
}
}
catch( IOException io )
{
if( doc != null )
{
doc.close();
}
throw io;
}
}
/**
* This will create a PDF document with some text in it.
* <br />
* see usage() for commandline
*
* @param args Command line arguments.
*
* @throws IOException If there is an error with the PDF.
*/
public static void main(String[] args) throws IOException
{
// suppress the Dock icon on OS X
System.setProperty("apple.awt.UIElement", "true");
TextToPDF app = new TextToPDF();
PDDocument doc = new PDDocument();
try
{
if( args.length < 2 )
{
app.usage();
}
else
{
for( int i=0; i<args.length-2; i++ )
{
if( args[i].equals( "-standardFont" ))
{
i++;
app.setFont( getStandardFont( args[i] ));
}
else if( args[i].equals( "-ttf" ))
{
i++;
PDFont font = PDType0Font.load( doc, new File( args[i]) );
app.setFont( font );
}
else if( args[i].equals( "-fontSize" ))
{
i++;
app.setFontSize( Integer.parseInt( args[i] ) );
}
else
{
throw new IOException( "Unknown argument:" + args[i] );
}
}
app.createPDFFromText( doc, new FileReader( args[args.length-1] ) );
doc.save( args[args.length-2] );
}
}
finally
{
doc.close();
}
}
/**
* This will print out a message telling how to use this example.
*/
private void usage()
{
String[] std14 = getStandard14Names();
StringBuilder message = new StringBuilder();
message.append("Usage: jar -jar pdfbox-app-x.y.z.jar TextToPDF [options] <outputfile> <textfile>\n");
message.append("\nOptions:\n");
message.append(" -standardFont <name> : " + DEFAULT_FONT.getBaseFont() + " (default)\n");
for (String std14String : std14)
{
message.append(" " + std14String + "\n");
}
message.append(" -ttf <ttf file> : The TTF font to use.\n");
message.append(" -fontSize <fontSize> : default: " + DEFAULT_FONT_SIZE );
System.err.println(message.toString());
System.exit(1);
}
/**
* A convenience method to get one of the standard 14 fonts by name.
*
* @param name The name of the font to get.
*
* @return The font that matches the name or null if it does not exist.
*/
private static PDType1Font getStandardFont(String name)
{
return STANDARD_14.get(name);
}
/**
* This will get the names of the standard 14 fonts.
*
* @return An array of the names of the standard 14 fonts.
*/
private static String[] getStandard14Names()
{
return STANDARD_14.keySet().toArray(new String[14]);
}
/**
* @return Returns the font.
*/
public PDFont getFont()
{
return font;
}
/**
* @param aFont The font to set.
*/
public void setFont(PDFont aFont)
{
this.font = aFont;
}
/**
* @return Returns the fontSize.
*/
public int getFontSize()
{
return fontSize;
}
/**
* @param aFontSize The fontSize to set.
*/
public void setFontSize(int aFontSize)
{
this.fontSize = aFontSize;
}
}<|fim▁end|> | |
<|file_name|>const.ts<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
export const DUMMY_SECRET = `[SECRET HIDDEN]`;<|fim▁end|> | |
<|file_name|>na.js<|end_file_name|><|fim▁begin|>var fs = require('fs');
var assert = require('assert');
var JSZip = require('jszip');
var path = require('path')
var compareWorkbooks = require('./util/compareworkbooks.js')
var excelbuilder = require('..');
describe('It generates a simple workbook', function () {
it('generates a ZIP file we can save', function (done) {
var workbook = excelbuilder.createWorkbook();
var table = [
[1, 2, "", 4, 5],
[2, 4, null, 16, 20],
[1, 4, NaN, 16, 25],
[4, 8, undefined, 16, 20]
]
var sheet1 = workbook.createSheet('sheet1', table[0].length, table.length);
table.forEach(function (row, rowIdx) {
row.forEach(function (val, colIdx) {
sheet1.set(colIdx + 1, rowIdx + 1, val)
})
})
workbook.generate(function (err, zip) {
if (err) throw err;
zip.generateAsync({type: "nodebuffer"}).then(function (buffer) {
var OUTFILE = './test/out/example.xlsx';<|fim▁hole|> console.log('open \"' + OUTFILE + "\"");
compareWorkbooks('./test/files/example.xlsx', OUTFILE, function (err, result) {
if (err) throw err;
// assert(result)
done(err);
});
});
});
});
});
});<|fim▁end|> | fs.writeFile(OUTFILE, buffer, function (err) { |
<|file_name|>modules.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#<|fim▁hole|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for module management."""
# Do not add any imports to non-standard modules here.
import os
import site
import sys
def _config_modules_directory(root_directory):
"""Get the config modules directory."""
config_dir = os.getenv('CONFIG_DIR_OVERRIDE')
if not config_dir:
config_dir = os.path.join(root_directory, 'src', 'appengine', 'config')
return os.path.join(config_dir, 'modules')
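# For example (illustrative): with CONFIG_DIR_OVERRIDE=/custom/config this
# returns /custom/config/modules; when the variable is unset it falls back to
# <root_directory>/src/appengine/config/modules.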
def _patch_appengine_modules_for_bots():
"""Patch out App Engine reliant behaviour from bots."""
if os.getenv('SERVER_SOFTWARE'):
# Not applicable on App Engine.
return
# google.auth uses App Engine credentials based on importability of
# google.appengine.api.app_identity.
try:
from google.auth import app_engine as auth_app_engine
if auth_app_engine.app_identity:
auth_app_engine.app_identity = None
except ImportError:
pass
def fix_module_search_paths():
"""Add directories that we must be able to import from to path."""
root_directory = os.environ['ROOT_DIR']
source_directory = os.path.join(root_directory, 'src')
python_path = os.getenv('PYTHONPATH', '').split(os.pathsep)
third_party_libraries_directory = os.path.join(source_directory,
'third_party')
config_modules_directory = _config_modules_directory(root_directory)
if (os.path.exists(config_modules_directory) and
config_modules_directory not in sys.path):
sys.path.insert(0, config_modules_directory)
python_path.insert(0, config_modules_directory)
if third_party_libraries_directory not in sys.path:
sys.path.insert(0, third_party_libraries_directory)
python_path.insert(0, third_party_libraries_directory)
if source_directory not in sys.path:
sys.path.insert(0, source_directory)
python_path.insert(0, source_directory)
os.environ['PYTHONPATH'] = os.pathsep.join(python_path)
# Add site directory to make from imports work in google namespace.
site.addsitedir(third_party_libraries_directory)
# TODO(ochang): Remove this once SDK is removed from images.
_patch_appengine_modules_for_bots()<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# |
<|file_name|>low-priority.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _SvgIcon = require('../../SvgIcon');
var _SvgIcon2 = _interopRequireDefault(_SvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var ContentLowPriority = function ContentLowPriority(props) {
return _react2.default.createElement(
_SvgIcon2.default,
props,
_react2.default.createElement('path', { d: 'M14 5h8v2h-8zm0 5.5h8v2h-8zm0 5.5h8v2h-8zM2 11.5C2 15.08 4.92 18 8.5 18H9v2l3-3-3-3v2h-.5C6.02 16 4 13.98 4 11.5S6.02 7 8.5 7H12V5H8.5C4.92 5 2 7.92 2 11.5z' })
);
};
ContentLowPriority = (0, _pure2.default)(ContentLowPriority);
ContentLowPriority.displayName = 'ContentLowPriority';
exports.default = ContentLowPriority;<|fim▁end|> | |
<|file_name|>maildir.rs<|end_file_name|><|fim▁begin|>use std::time::Duration;
use crossbeam_channel::Sender;
use maildir::Maildir as ExtMaildir;
use serde_derive::Deserialize;
use crate::blocks::{Block, ConfigBlock, Update};
use crate::config::SharedConfig;
use crate::de::deserialize_duration;
use crate::errors::*;
use crate::scheduler::Task;
use crate::widgets::text::TextWidget;<|fim▁hole|>#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MailType {
New,
Cur,
All,
}
impl MailType {
fn count_mail(&self, maildir: &ExtMaildir) -> usize {
match self {
MailType::New => maildir.count_new(),
MailType::Cur => maildir.count_cur(),
MailType::All => maildir.count_new() + maildir.count_cur(),
}
}
}
pub struct Maildir {
id: usize,
text: TextWidget,
update_interval: Duration,
inboxes: Vec<String>,
threshold_warning: usize,
threshold_critical: usize,
display_type: MailType,
}
//TODO add `format`
#[derive(Deserialize, Debug, Clone)]
#[serde(deny_unknown_fields, default)]
pub struct MaildirConfig {
/// Update interval in seconds
#[serde(deserialize_with = "deserialize_duration")]
pub interval: Duration,
pub inboxes: Vec<String>,
pub threshold_warning: usize,
pub threshold_critical: usize,
pub display_type: MailType,
// DEPRECATED
pub icon: bool,
}
impl Default for MaildirConfig {
fn default() -> Self {
Self {
interval: Duration::from_secs(5),
inboxes: Vec::new(),
threshold_warning: 1,
threshold_critical: 10,
display_type: MailType::New,
icon: true,
}
}
}
impl ConfigBlock for Maildir {
type Config = MaildirConfig;
fn new(
id: usize,
block_config: Self::Config,
shared_config: SharedConfig,
_tx_update_request: Sender<Task>,
) -> Result<Self> {
let widget = TextWidget::new(id, 0, shared_config).with_text("");
Ok(Maildir {
id,
update_interval: block_config.interval,
text: if block_config.icon {
widget.with_icon("mail")?
} else {
widget
},
inboxes: block_config.inboxes,
threshold_warning: block_config.threshold_warning,
threshold_critical: block_config.threshold_critical,
display_type: block_config.display_type,
})
}
}
impl Block for Maildir {
fn update(&mut self) -> Result<Option<Update>> {
let mut newmails = 0;
for inbox in &self.inboxes {
let isl: &str = &inbox[..];
let maildir = ExtMaildir::from(isl);
newmails += self.display_type.count_mail(&maildir)
}
let mut state = State::Idle;
if newmails >= self.threshold_critical {
state = State::Critical;
} else if newmails >= self.threshold_warning {
state = State::Warning;
}
self.text.set_state(state);
self.text.set_text(format!("{}", newmails));
Ok(Some(self.update_interval.into()))
}
fn view(&self) -> Vec<&dyn I3BarWidget> {
vec![&self.text]
}
fn id(&self) -> usize {
self.id
}
}<|fim▁end|> | use crate::widgets::{I3BarWidget, State};
|
<|file_name|>test_wrappers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from werkzeug.contrib import wrappers
from werkzeug import routing
from werkzeug.wrappers import Request, Response
def test_reverse_slash_behavior():
"""Test ReverseSlashBehaviorRequestMixin"""
class MyRequest(wrappers.ReverseSlashBehaviorRequestMixin, Request):
pass
req = MyRequest.from_values('/foo/bar', 'http://example.com/test')
assert req.url == 'http://example.com/test/foo/bar'
assert req.path == 'foo/bar'<|fim▁hole|> map = routing.Map([routing.Rule('/foo/bar', endpoint='foo')])
adapter = map.bind_to_environ(req.environ)
assert adapter.match() == ('foo', {})
adapter = map.bind(req.host, req.script_root)
assert adapter.match(req.path) == ('foo', {})
def test_dynamic_charset_request_mixin():
"""Test DynamicCharsetRequestMixin"""
class MyRequest(wrappers.DynamicCharsetRequestMixin, Request):
pass
env = {'CONTENT_TYPE': 'text/html'}
req = MyRequest(env)
assert req.charset == 'latin1'
env = {'CONTENT_TYPE': 'text/html; charset=utf-8'}
req = MyRequest(env)
assert req.charset == 'utf-8'
env = {'CONTENT_TYPE': 'application/octet-stream'}
req = MyRequest(env)
assert req.charset == 'latin1'
assert req.url_charset == 'latin1'
MyRequest.url_charset = 'utf-8'
env = {'CONTENT_TYPE': 'application/octet-stream'}
req = MyRequest(env)
assert req.charset == 'latin1'
assert req.url_charset == 'utf-8'
def return_ascii(x):
return "ascii"
env = {'CONTENT_TYPE': 'text/plain; charset=x-weird-charset'}
req = MyRequest(env)
req.unknown_charset = return_ascii
assert req.charset == 'ascii'
assert req.url_charset == 'utf-8'
def test_dynamic_charset_response_mixin():
"""Test DynamicCharsetResponseMixin"""
class MyResponse(wrappers.DynamicCharsetResponseMixin, Response):
default_charset = 'utf-7'
resp = MyResponse(mimetype='text/html')
assert resp.charset == 'utf-7'
resp.charset = 'utf-8'
assert resp.charset == 'utf-8'
assert resp.mimetype == 'text/html'
assert resp.mimetype_params == {'charset': 'utf-8'}
resp.mimetype_params['charset'] = 'iso-8859-15'
assert resp.charset == 'iso-8859-15'
resp.data = u'Hällo Wörld'
assert ''.join(resp.iter_encoded()) == \
u'Hällo Wörld'.encode('iso-8859-15')
del resp.headers['content-type']
try:
resp.charset = 'utf-8'
except TypeError, e:
pass
else:
assert False, 'expected type error on charset setting without ct'<|fim▁end|> | assert req.script_root == '/test/'
# make sure the routing system works with the slashes in
# reverse order as well. |
<|file_name|>image_reorientation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Thu Aug 1 16:10:56 2013
@author: vterzopoulos, abrys
"""
# To ignore numpy errors:
# pylint: disable=E1101
import nibabel
import numpy
from dicom2nifti.image_volume import load, SliceType, ImageVolume
def reorient_image(input_image, output_image):
"""
Change the orientation of the Image data in order to be in LAS space
x will represent the coronal plane, y the sagittal and z the axial plane.
x increases from Right (R) to Left (L), y from Posterior (P) to Anterior (A) and z from Inferior (I) to Superior (S)
:returns: The output image in nibabel form
:param output_image: filepath to the nibabel image
:param input_image: filepath to the nibabel image
"""
# Use the imageVolume module to find which coordinate corresponds to each plane
# and get the image data in RAS orientation
# print 'Reading nifti'
if isinstance(input_image, nibabel.Nifti1Image):
image = ImageVolume(input_image)
else:
image = load(input_image)
# 4d have a different conversion to 3d
# print 'Reorganizing data'
if image.nifti_data.squeeze().ndim == 4:
new_image = _reorient_4d(image)
elif image.nifti_data.squeeze().ndim == 3 or image.nifti_data.ndim == 3 or image.nifti_data.squeeze().ndim == 2:
new_image = _reorient_3d(image)
else:
raise Exception('Only 3d and 4d images are supported')
# print 'Recreating affine'
affine = image.nifti.affine
# Based on VolumeImage.py where slice orientation 1 represents the axial plane
# Flipping on the data may be needed based on x_inverted, y_inverted, ZInverted
# Create new affine header by changing the order of the columns of the input image header
# the last column with the origin depends on the origin of the original image, the size and the direction of x,y,z
new_affine = numpy.eye(4)
new_affine[:, 0] = affine[:, image.sagittal_orientation.normal_component]
new_affine[:, 1] = affine[:, image.coronal_orientation.normal_component]
new_affine[:, 2] = affine[:, image.axial_orientation.normal_component]
point = [0, 0, 0, 1]
# If the orientation of coordinates is inverted, then the origin of the "new" image
# would correspond to the last voxel of the original image
# First we need to find which point is the origin point in image coordinates
# and then transform it in world coordinates
if not image.axial_orientation.x_inverted:
new_affine[:, 0] = - new_affine[:, 0]
point[image.sagittal_orientation.normal_component] = image.dimensions[
image.sagittal_orientation.normal_component] - 1
# new_affine[0, 3] = - new_affine[0, 3]
if image.axial_orientation.y_inverted:
new_affine[:, 1] = - new_affine[:, 1]
point[image.coronal_orientation.normal_component] = image.dimensions[
image.coronal_orientation.normal_component] - 1
# new_affine[1, 3] = - new_affine[1, 3]
if image.coronal_orientation.y_inverted:
new_affine[:, 2] = - new_affine[:, 2]
point[image.axial_orientation.normal_component] = image.dimensions[image.axial_orientation.normal_component] - 1
# new_affine[2, 3] = - new_affine[2, 3]
new_affine[:, 3] = numpy.dot(affine, point)
# DONE: new_affine has been updated so that there is no translation difference
# between the original and the created image (there used to be a 1-2 voxel translation)
# print 'Creating new nifti image'<|fim▁hole|> output = nibabel.nifti1.Nifti1Image(new_image, new_affine)
output.header.set_slope_inter(1, 0)
output.header.set_xyzt_units(2) # set units for xyz (leave t as unknown)
output.to_filename(output_image)
return output
def _reorient_4d(image):
"""
Reorganize the data for a 4d nifti
"""
# print 'converting 4d image'
# Create empty array where x,y,z correspond to LR (sagittal), PA (coronal), IS (axial) directions and the size
# of the array in each direction is the same with the corresponding direction of the input image.
new_image = numpy.zeros([image.dimensions[image.sagittal_orientation.normal_component],
image.dimensions[image.coronal_orientation.normal_component],
image.dimensions[image.axial_orientation.normal_component],
image.dimensions[3]],
dtype=image.nifti_data.dtype)
# loop over all timepoints
for timepoint in range(0, image.dimensions[3]):
# Fill the new image with the values of the input image but with matching the orientation with x,y,z
if image.coronal_orientation.y_inverted:
for i in range(new_image.shape[2]):
new_image[:, :, i, timepoint] = numpy.fliplr(numpy.squeeze(image.get_slice(SliceType.AXIAL,
new_image.shape[2] - 1 - i,
timepoint).original_data))
else:
for i in range(new_image.shape[2]):
new_image[:, :, i, timepoint] = numpy.fliplr(numpy.squeeze(image.get_slice(SliceType.AXIAL,
i, timepoint).original_data))
return new_image
def _reorient_3d(image):
"""
Reorganize the data for a 3d nifti
"""
# Create empty array where x,y,z correspond to LR (sagittal), PA (coronal), IS (axial) directions and the size
# of the array in each direction is the same with the corresponding direction of the input image.
new_image = numpy.zeros([image.dimensions[image.sagittal_orientation.normal_component],
image.dimensions[image.coronal_orientation.normal_component],
image.dimensions[image.axial_orientation.normal_component]],
dtype=image.nifti_data.dtype)
# Fill the new image with the values of the input image but with matching the orientation with x,y,z
if image.coronal_orientation.y_inverted:
for i in range(new_image.shape[2]):
new_image[:, :, i] = numpy.fliplr(image.get_slice(SliceType.AXIAL,
new_image.shape[2] - 1 - i).original_data)
else:
for i in range(new_image.shape[2]):
new_image[:, :, i] = numpy.fliplr(image.get_slice(SliceType.AXIAL,
i).original_data)
return new_image<|fim▁end|> | if new_image.ndim > 3: # do not squeeze single slice data
new_image = new_image.squeeze() |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import {Server, Config} from "./server";
import * as path from "path";
/**<|fim▁hole|> const configPath = path.join(__dirname, process.argv[2]);
console.info("loading configuration file: " + configPath);
config = require(configPath);
}else {
console.error("no configuration file provided, exiting");
process.exit();
};
const server = new Server(config);
server.run();<|fim▁end|> | * Load configuration file given as command line parameter
*/
let config: Config;
if (process.argv.length > 2) { |
<|file_name|>resource_arm_loadbalancer_nat_pool.go<|end_file_name|><|fim▁begin|>package azurerm
import (
"fmt"
"log"
"time"
"github.com/Azure/azure-sdk-for-go/arm/network"
"github.com/hashicorp/errwrap"
"github.com/r3labs/terraform/helper/resource"
"github.com/r3labs/terraform/helper/schema"
"github.com/jen20/riviera/azure"
)
func resourceArmLoadBalancerNatPool() *schema.Resource {
return &schema.Resource{
Create: resourceArmLoadBalancerNatPoolCreate,
Read: resourceArmLoadBalancerNatPoolRead,
Update: resourceArmLoadBalancerNatPoolCreate,
Delete: resourceArmLoadBalancerNatPoolDelete,
Importer: &schema.ResourceImporter{
State: loadBalancerSubResourceStateImporter,
},
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"location": {
Type: schema.TypeString,
ForceNew: true,
Optional: true,
StateFunc: azureRMNormalizeLocation,
DiffSuppressFunc: azureRMSuppressLocationDiff,
Deprecated: "location is no longer used",
},
"resource_group_name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"loadbalancer_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"protocol": {
Type: schema.TypeString,
Required: true,
StateFunc: ignoreCaseStateFunc,
DiffSuppressFunc: ignoreCaseDiffSuppressFunc,
},
"frontend_port_start": {
Type: schema.TypeInt,
Required: true,
},
"frontend_port_end": {
Type: schema.TypeInt,
Required: true,
},
"backend_port": {
Type: schema.TypeInt,
Required: true,
},
"frontend_ip_configuration_name": {
Type: schema.TypeString,
Required: true,
},
"frontend_ip_configuration_id": {
Type: schema.TypeString,
Computed: true,
},
},
}
}
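// Illustrative Terraform usage (the `azurerm_lb_nat_pool` resource name is
// assumed from the provider's usual registration, not shown in this file):
//
//   resource "azurerm_lb_nat_pool" "example" {
//     name                           = "ssh"
//     resource_group_name            = "${azurerm_resource_group.example.name}"
//     loadbalancer_id                = "${azurerm_lb.example.id}"
//     protocol                       = "Tcp"
//     frontend_port_start            = 50000
//     frontend_port_end              = 50119
//     backend_port                   = 22
//     frontend_ip_configuration_name = "frontend"
//   }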
func resourceArmLoadBalancerNatPoolCreate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient)
lbClient := client.loadBalancerClient
loadBalancerID := d.Get("loadbalancer_id").(string)
armMutexKV.Lock(loadBalancerID)
defer armMutexKV.Unlock(loadBalancerID)
loadBalancer, exists, err := retrieveLoadBalancerById(loadBalancerID, meta)
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer By ID {{err}}", err)
}
if !exists {
d.SetId("")
log.Printf("[INFO] LoadBalancer %q not found. Removing from state", d.Get("name").(string))
return nil
}
newNatPool, err := expandAzureRmLoadBalancerNatPool(d, loadBalancer)
if err != nil {
return errwrap.Wrapf("Error Expanding NAT Pool {{err}}", err)
}
natPools := append(*loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools, *newNatPool)
existingNatPool, existingNatPoolIndex, exists := findLoadBalancerNatPoolByName(loadBalancer, d.Get("name").(string))
if exists {
if d.Get("name").(string) == *existingNatPool.Name {
// this probe is being updated/reapplied remove old copy from the slice
natPools = append(natPools[:existingNatPoolIndex], natPools[existingNatPoolIndex+1:]...)
}
}
loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &natPools
resGroup, loadBalancerName, err := resourceGroupAndLBNameFromId(d.Get("loadbalancer_id").(string))
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer Name and Group: {{err}}", err)
}
_, err = lbClient.CreateOrUpdate(resGroup, loadBalancerName, *loadBalancer, make(chan struct{}))
if err != nil {
return errwrap.Wrapf("Error Creating/Updating LoadBalancer {{err}}", err)
}
read, err := lbClient.Get(resGroup, loadBalancerName, "")
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer {{err}}", err)
}
if read.ID == nil {
return fmt.Errorf("Cannot read LoadBalancer %s (resource group %s) ID", loadBalancerName, resGroup)
}
var natPool_id string
for _, InboundNatPool := range *(*read.LoadBalancerPropertiesFormat).InboundNatPools {
if *InboundNatPool.Name == d.Get("name").(string) {
natPool_id = *InboundNatPool.ID
}
}
if natPool_id != "" {
d.SetId(natPool_id)
} else {
return fmt.Errorf("Cannot find created LoadBalancer NAT Pool ID %q", natPool_id)
}
log.Printf("[DEBUG] Waiting for LoadBalancer (%s) to become available", loadBalancerName)
stateConf := &resource.StateChangeConf{
Pending: []string{"Accepted", "Updating"},
Target: []string{"Succeeded"},
Refresh: loadbalancerStateRefreshFunc(client, resGroup, loadBalancerName),
Timeout: 10 * time.Minute,
}
if _, err := stateConf.WaitForState(); err != nil {
return fmt.Errorf("Error waiting for LoadBalancer (%s) to become available: %s", loadBalancerName, err)
}
return resourceArmLoadBalancerNatPoolRead(d, meta)<|fim▁hole|>
func resourceArmLoadBalancerNatPoolRead(d *schema.ResourceData, meta interface{}) error {
id, err := parseAzureResourceID(d.Id())
if err != nil {
return err
}
name := id.Path["inboundNatPools"]
loadBalancer, exists, err := retrieveLoadBalancerById(d.Get("loadbalancer_id").(string), meta)
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer By ID {{err}}", err)
}
if !exists {
d.SetId("")
log.Printf("[INFO] LoadBalancer %q not found. Removing from state", name)
return nil
}
config, _, exists := findLoadBalancerNatPoolByName(loadBalancer, name)
if !exists {
d.SetId("")
log.Printf("[INFO] LoadBalancer Nat Pool %q not found. Removing from state", name)
return nil
}
d.Set("name", config.Name)
d.Set("resource_group_name", id.ResourceGroup)
d.Set("protocol", config.InboundNatPoolPropertiesFormat.Protocol)
d.Set("frontend_port_start", config.InboundNatPoolPropertiesFormat.FrontendPortRangeStart)
d.Set("frontend_port_end", config.InboundNatPoolPropertiesFormat.FrontendPortRangeEnd)
d.Set("backend_port", config.InboundNatPoolPropertiesFormat.BackendPort)
if config.InboundNatPoolPropertiesFormat.FrontendIPConfiguration != nil {
fipID, err := parseAzureResourceID(*config.InboundNatPoolPropertiesFormat.FrontendIPConfiguration.ID)
if err != nil {
return err
}
d.Set("frontend_ip_configuration_name", fipID.Path["frontendIPConfigurations"])
d.Set("frontend_ip_configuration_id", config.InboundNatPoolPropertiesFormat.FrontendIPConfiguration.ID)
}
return nil
}
func resourceArmLoadBalancerNatPoolDelete(d *schema.ResourceData, meta interface{}) error {
client := meta.(*ArmClient)
lbClient := client.loadBalancerClient
loadBalancerID := d.Get("loadbalancer_id").(string)
armMutexKV.Lock(loadBalancerID)
defer armMutexKV.Unlock(loadBalancerID)
loadBalancer, exists, err := retrieveLoadBalancerById(loadBalancerID, meta)
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer By ID {{err}}", err)
}
if !exists {
d.SetId("")
return nil
}
_, index, exists := findLoadBalancerNatPoolByName(loadBalancer, d.Get("name").(string))
if !exists {
return nil
}
oldNatPools := *loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools
newNatPools := append(oldNatPools[:index], oldNatPools[index+1:]...)
loadBalancer.LoadBalancerPropertiesFormat.InboundNatPools = &newNatPools
resGroup, loadBalancerName, err := resourceGroupAndLBNameFromId(d.Get("loadbalancer_id").(string))
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer Name and Group: {{err}}", err)
}
_, err = lbClient.CreateOrUpdate(resGroup, loadBalancerName, *loadBalancer, make(chan struct{}))
if err != nil {
return errwrap.Wrapf("Error Creating/Updating LoadBalancer {{err}}", err)
}
read, err := lbClient.Get(resGroup, loadBalancerName, "")
if err != nil {
return errwrap.Wrapf("Error Getting LoadBalancer {{err}}", err)
}
if read.ID == nil {
return fmt.Errorf("Cannot read LoadBalancer %s (resource group %s) ID", loadBalancerName, resGroup)
}
return nil
}
func expandAzureRmLoadBalancerNatPool(d *schema.ResourceData, lb *network.LoadBalancer) (*network.InboundNatPool, error) {
properties := network.InboundNatPoolPropertiesFormat{
Protocol: network.TransportProtocol(d.Get("protocol").(string)),
FrontendPortRangeStart: azure.Int32(int32(d.Get("frontend_port_start").(int))),
FrontendPortRangeEnd: azure.Int32(int32(d.Get("frontend_port_end").(int))),
BackendPort: azure.Int32(int32(d.Get("backend_port").(int))),
}
if v := d.Get("frontend_ip_configuration_name").(string); v != "" {
rule, _, exists := findLoadBalancerFrontEndIpConfigurationByName(lb, v)
if !exists {
return nil, fmt.Errorf("[ERROR] Cannot find FrontEnd IP Configuration with the name %s", v)
}
feip := network.SubResource{
ID: rule.ID,
}
properties.FrontendIPConfiguration = &feip
}
natPool := network.InboundNatPool{
Name: azure.String(d.Get("name").(string)),
InboundNatPoolPropertiesFormat: &properties,
}
return &natPool, nil
}<|fim▁end|> | } |
<|file_name|>solution-table-cell-directive.js<|end_file_name|><|fim▁begin|>/**
* Created by eirikskogland on 24.09.15.
*/
angular.module('ExamApp').directive('solutionTableCell', function() {
return {
restrict: 'E',
templateUrl: 'templates/directives/solution-table-cell.html',
scope: {
tableIndex: '@',
hasSolution: '@'
},
controller: function($scope) {
console.log("Solution Table Cell Controller");
$scope.showingSolutionRow = false;
$scope.showSolutionForm = function(index) {
$(document.getElementById("solutionForm"+index)).toggle();
};
$scope.showSolutionPDF = function(index) {
$(document.getElementById("solutionPDF"+index)).toggle();
};
$scope.showSolutionClicked = function(index) {
$scope.showingSolutionRow = !$scope.showingSolutionRow;
if($scope.hasSolution) {
// show solution
$scope.showSolutionPDF(index);
} else {
// if logged in
// show solution form
$scope.showSolutionForm(index);
// else
// show login message
}
};
},<|fim▁hole|> controllerAs: 'ctrl'
}
});<|fim▁end|> | |
<|file_name|>deriving-span-Rand-enum-struct-variant.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'<|fim▁hole|>
struct Error;
#[deriving(Rand)]
enum Enum {
A {
x: Error //~ ERROR
}
}
fn main() {}<|fim▁end|> |
#![feature(struct_variant)]
extern crate rand;
|
<|file_name|>progresshud.py<|end_file_name|><|fim▁begin|>import os
from kivy.lang import Builder
from kivy.properties import NumericProperty, StringProperty
from kivy.uix.anchorlayout import AnchorLayout
from cobiv.modules.core.hud import Hud
Builder.load_file(os.path.abspath(os.path.join(os.path.dirname(__file__), 'progresshud.kv')))
class ProgressHud(Hud, AnchorLayout):
value = NumericProperty(0)
caption = StringProperty("")<|fim▁hole|><|fim▁end|> |
def __init__(self, **kwargs):
super(ProgressHud, self).__init__(**kwargs) |
<|file_name|>issue-16098.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
macro_rules! prob1 {
(0) => {
0
};
($n:expr) => {
if ($n % 3 == 0) || ($n % 5 == 0) {<|fim▁hole|> };
}
fn main() {
println!("Problem 1: {}", prob1!(1000));
}<|fim▁end|> | $n + prob1!($n - 1); //~ ERROR recursion limit reached while expanding the macro `prob1`
} else {
prob1!($n - 1);
} |
<|file_name|>EigenDecompositor.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): Maxim Samoylov
*
*/
package org.la4j.decomposition;
import org.la4j.Matrices;
import org.la4j.Matrix;
import org.la4j.matrix.SparseMatrix;
import org.la4j.Vector;
import org.la4j.Vectors;
import org.la4j.vector.DenseVector;
import org.la4j.vector.functor.VectorAccumulator;
/**
* This class represents Eigen decomposition of matrices. More details
* <p>
* <a href="http://mathworld.wolfram.com/EigenDecomposition.html"> here.</a>
* </p>
*/
public class EigenDecompositor extends AbstractDecompositor implements MatrixDecompositor {
public EigenDecompositor(Matrix matrix) {
super(matrix);
}
/**
* Returns the result of Eigen (EVD) decomposition of given matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @return { V, D }
*/
@Override
public Matrix[] decompose() {
if (matrix.is(Matrices.SYMMETRIC_MATRIX)) {
return decomposeSymmetricMatrix(matrix);
} else if (matrix.rows() == matrix.columns()) {
return decomposeNonSymmetricMatrix(matrix);
} else {
throw new IllegalArgumentException("Can't decompose rectangle matrix");
}
}
@Override
public boolean applicableTo(Matrix matrix) {
return matrix.rows() == matrix.columns();
}
/**
* Returns the result of Eigen decomposition for <a
* href="http://mathworld.wolfram.com/SymmetricMatrix.html"> symmetric</a>
* matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @param matrix
* @return { V, D }
*/
private Matrix[] decomposeSymmetricMatrix(Matrix matrix) {
Matrix d = matrix.copy();
Matrix v = SparseMatrix.identity(matrix.rows());
Vector r = generateR(d);
Matrix u = SparseMatrix.identity(matrix.rows());
VectorAccumulator normAccumulator = Vectors.mkEuclideanNormAccumulator();
double n = Matrices.EPS;
double nn = r.fold(normAccumulator);
int kk = 0;
int ll = 0;
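
        // Classical Jacobi iteration: repeatedly pick the row k with the
        // largest off-diagonal norm and the largest off-diagonal entry (k, l)
        // in that row, zero it with the plane rotation u, and accumulate the
        // rotations in v. The loop stops when the off-diagonal norm settles.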
while (Math.abs(n - nn) > Matrices.EPS) {
int k = findMax(r);
int l = findMax(d, k);
regenerateU(u, d, k, l, kk, ll);
kk = k;
ll = l;
v = v.multiply(u);
d = u.transpose().multiply(d.multiply(u));
r.set(k, generateRi(d, k));
r.set(l, generateRi(d, l));
n = nn;
nn = r.fold(normAccumulator);
}
return new Matrix[] { v, d };
}
private int findMax(Vector vector) {
double value = vector.get(0);
int result = 0;
for (int i = 1; i < vector.length(); i++) {
double v = vector.get(i);
if (Math.abs(value) < Math.abs(v)) {
result = i;
value = v;
}
}
return result;
}
private int findMax(Matrix matrix, int i) {
double value = i > 0 ? matrix.get(i, 0) : matrix.get(i, 1);
int result = i > 0 ? 0 : 1;
for (int j = 0; j < matrix.columns(); j++) {
if (i != j) {
double v = matrix.get(i, j);
if (Math.abs(value) < Math.abs(v)) {
result = j;
value = v;
}
}
}
return result;
}
private Vector generateR(Matrix matrix) {
Vector result = DenseVector.zero(matrix.rows());
for (int i = 0; i < matrix.rows(); i++) {
result.set(i, generateRi(matrix, i));
}
return result;
}
private double generateRi(Matrix matrix, int i) {
double acc = 0;
for (int j = 0; j < matrix.columns(); j++) {
if (j != i) {
double value = matrix.get(i, j);
acc += value * value;
}
}
return acc;
}
private void regenerateU(Matrix u, Matrix matrix, int k, int l, int kk, int ll) {
u.set(kk, kk, 1.0);
u.set(ll, ll, 1.0);
u.set(kk, ll, 0.0);
u.set(ll, kk, 0.0);
double alpha = 0.0;
double beta = 0.0;
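        // alpha and beta are the cosine and sine of the Jacobi rotation that
        // annihilates the (k, l) entry; equal diagonal elements require the
        // special 45-degree rotation to avoid a division by zero below.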
if (Math.abs(matrix.get(k, k) - matrix.get(l, l)) < Matrices.EPS) {
alpha = beta = Math.sqrt(0.5);
} else {
double mu = 2 * matrix.get(k, l) / (matrix.get(k, k) - matrix.get(l, l));
mu = 1.0 / Math.sqrt(1.0 + mu * mu);
alpha = Math.sqrt(0.5 * (1.0 + mu));
beta = Math.signum(mu) * Math.sqrt(0.5 * (1.0 - mu));
}
u.set(k, k, alpha);
u.set(l, l, alpha);
u.set(k, l, -beta);
u.set(l, k, beta);
}
/**
* Returns the result of Eigen decomposition for non-<a
* href="http://mathworld.wolfram.com/SymmetricMatrix.html">symmetric</a>
* matrix
* <p>
* See <a href="http://mathworld.wolfram.com/EigenDecomposition.html">
* http://mathworld.wolfram.com/EigenDecomposition.html</a> for more
* details.
* </p>
*
* @param matrix
* @return { P, D }
*/
private Matrix[] decomposeNonSymmetricMatrix(Matrix matrix) {
Matrix A = matrix.copy();
int n = matrix.columns();
Matrix v = SparseMatrix.identity(n);
Vector d = DenseVector.zero(n);
Vector e = DenseVector.zero(n);
Matrix h = A.copy();
Vector ort = DenseVector.zero(n);
// Reduce to Hessenberg form.
orthes(h, v, ort);
// Reduce Hessenberg to real Schur form.
hqr2(h, v, d, e);
Matrix dd = matrix.blankOfShape(n, n);
for (int i = 0; i < n; i++) {
dd.set(i, i, d.get(i));
if (e.get(i) > 0) {
dd.set(i, i + 1, e.get(i));
} else if (e.get(i) < 0) {
dd.set(i, i - 1, e.get(i));
}
}
return new Matrix[] { v, dd };
}
// Nonsymmetric reduction to Hessenberg form.
private void orthes(Matrix h, Matrix v, Vector ort) {
// This is derived from the Algol procedures orthes and ortran,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutines in EISPACK.
int n = ort.length();
int low = 0;
int high = n - 1;
for (int m = low + 1; m <= high - 1; m++) {
// Scale column.
double scale = 0.0;
for (int i = m; i <= high; i++) {
scale = scale + Math.abs(h.get(i, m - 1));
}
if (scale != 0.0) {
// Compute Householder transformation.
double hh = 0.0;
for (int i = high; i >= m; i--) {
ort.set(i, h.get(i, m - 1) / scale);
hh += ort.get(i) * ort.get(i);
}
double g = Math.sqrt(hh);
if (ort.get(m) > Matrices.EPS) {
g = -g;
}
hh = hh - ort.get(m) * g;
ort.updateAt(m, Vectors.asMinusFunction(g));
// Apply Householder similarity transformation
// H = (I-u*u'/h)*H*(I-u*u')/h)
for (int j = m; j < n; j++) {
double f = 0.0;
for (int i = high; i >= m; i--) {
f += ort.get(i) * h.get(i, j);
}
f = f / hh;
for (int i = m; i <= high; i++) {
h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(i)));
}
}
for (int i = 0; i <= high; i++) {
double f = 0.0;
for (int j = high; j >= m; j--) {
f += ort.get(j) * h.get(i, j);
}
f = f / hh;
for (int j = m; j <= high; j++) {
h.updateAt(i, j, Matrices.asMinusFunction(f * ort.get(j)));
}
}
ort.set(m, scale * ort.get(m));
h.set(m, m - 1, scale * g);
}
}
// Accumulate transformations (Algol's ortran).
for (int m = high - 1; m >= low + 1; m--) {
if (Math.abs(h.get(m, m - 1)) > Matrices.EPS) {
for (int i = m + 1; i <= high; i++) {
ort.set(i, h.get(i, m - 1));
}
for (int j = m; j <= high; j++) {
double g = 0.0;
for (int i = m; i <= high; i++) {
g += ort.get(i) * v.get(i, j);
}
// Double division avoids possible underflow
g = (g / ort.get(m)) / h.get(m, m - 1);
for (int i = m; i <= high; i++) {
v.updateAt(i, j, Matrices.asPlusFunction(g * ort.get(i)));
}
}
}
}
}
// Nonsymmetric reduction from Hessenberg to real Schur form.
private void hqr2(Matrix H, Matrix V, Vector d, Vector e) {
// This is derived from the Algol procedure hqr2,
// by Martin and Wilkinson, Handbook for Auto. Comp.,
// Vol.ii-Linear Algebra, and the corresponding
// Fortran subroutine in EISPACK.
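
        // On exit, d holds the real parts and e the imaginary parts of the
        // eigenvalues, and V is overwritten with the eigenvectors; the caller
        // assembles the block-diagonal D from d and e.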
// Initialize
int nn = e.length();
int n = nn - 1;
int low = 0;
int high = nn - 1;
double eps = Math.pow(2.0, -52.0);
double exshift = 0.0;
double p = 0;
double q = 0;
double r = 0;
double s = 0;
double z = 0;
double t;
double w;
double x;
double y;
// Store roots isolated by balanc and compute matrix norm
double norm = 0.0;
for (int i = 0; i < nn; i++) {
            if (i < low || i > high) {
d.set(i, H.get(i, i));
e.set(i, 0.0);
}
for (int j = Math.max(i - 1, 0); j < nn; j++) {
norm = norm + Math.abs(H.get(i, j));
}
}
// Outer loop over eigenvalue index
int iter = 0;
while (n >= low) {
// Look for single small sub-diagonal element
int l = n;
while (l > low) {
s = Math.abs(H.get(l - 1, l - 1))
+ Math.abs(H.get(l, l));
if (s == 0.0) {
s = norm;
}
if (Math.abs(H.get(l, l - 1)) < eps * s) {
break;
}
l--;
}
// Check for convergence
// One root found
if (l == n) {
H.updateAt(n, n, Matrices.asPlusFunction(exshift));
d.set(n, H.get(n, n));
e.set(n, 0.0);
n--;
iter = 0;
// Two roots found
} else if (l == n - 1) {
w = H.get(n, n - 1) * H.get(n - 1, n);
p = (H.get(n - 1, n - 1) - H.get(n, n)) / 2.0;
q = p * p + w;
z = Math.sqrt(Math.abs(q));
H.updateAt(n, n, Matrices.asPlusFunction(exshift));
H.updateAt(n - 1, n - 1, Matrices.asPlusFunction(exshift));
x = H.get(n, n);
// Real pair
if (q >= 0) {
if (p >= 0) {
z = p + z;
} else {
z = p - z;
}
d.set(n - 1, x + z);
d.set(n, d.get(n - 1));
if (z != 0.0) {
d.set(n, x - w / z);
}
e.set(n - 1, 0.0);
e.set(n, 0.0);
x = H.get(n, n - 1);
s = Math.abs(x) + Math.abs(z);
p = x / s;
q = z / s;
r = Math.sqrt(p * p + q * q);
p = p / r;
q = q / r;
// Row modification
<|fim▁hole|> H.set(n, j, q * H.get(n, j) - p * z);
}
// Column modification
for (int i = 0; i <= n; i++) {
z = H.get(i, n - 1);
H.set(i, n - 1, q * z + p * H.get(i, n));
H.set(i, n, q * H.get(i, n) - p * z);
}
// Accumulate transformations
for (int i = low; i <= high; i++) {
z = V.get(i, n - 1);
V.set(i, n - 1, q * z + p * V.get(i, n));
V.set(i, n, q * V.get(i, n) - p * z);
}
// Complex pair
} else {
d.set(n - 1, x + p);
d.set(n, x + p);
e.set(n - 1, z);
e.set(n, -z);
}
n = n - 2;
iter = 0;
// No convergence yet
} else {
// Form shift
x = H.get(n, n);
y = 0.0;
w = 0.0;
if (l < n) {
y = H.get(n - 1, n - 1);
w = H.get(n, n - 1) * H.get(n - 1, n);
}
// Wilkinson's original ad hoc shift
if (iter == 10) {
exshift += x;
for (int i = low; i <= n; i++) {
H.updateAt(i, i, Matrices.asMinusFunction(x));
}
s = Math.abs(H.get(n, n - 1))
+ Math.abs(H.get(n - 1, n - 2));
                    x = y = 0.75 * s; // magic constants from Wilkinson's ad hoc shift in EISPACK's hqr2
                    w = -0.4375 * s * s;
}
// MATLAB's new ad hoc shift
if (iter == 30) {
s = (y - x) / 2.0;
s = s * s + w;
if (s > 0) {
s = Math.sqrt(s);
if (y < x) {
s = -s;
}
s = x - w / ((y - x) / 2.0 + s);
for (int i = low; i <= n; i++) {
H.updateAt(i, i, Matrices.asMinusFunction(s));
}
exshift += s;
x = y = w = 0.964;
}
}
iter = iter + 1; // (Could check iteration count here.)
// Look for two consecutive small sub-diagonal elements
int m = n - 2;
while (m >= l) {
z = H.get(m, m);
r = x - z;
s = y - z;
p = (r * s - w) / H.get(m + 1, m)
+ H.get(m, m + 1);
q = H.get(m + 1, m + 1) - z - r - s;
r = H.get(m + 2, m + 1);
s = Math.abs(p) + Math.abs(q) + Math.abs(r);
p = p / s;
q = q / s;
r = r / s;
if (m == l) {
break;
}
if (Math.abs(H.get(m, m - 1)) * (Math.abs(q) + Math.abs(r)) < eps
* (Math.abs(p) * (Math.abs(H.get(m - 1, m - 1))
+ Math.abs(z) + Math.abs(H.get(m + 1, m + 1))))) {
break;
}
m--;
}
for (int i = m + 2; i <= n; i++) {
H.set(i, i - 2, 0.0);
if (i > m + 2) {
H.set(i, i - 3, 0.0);
}
}
// Double QR step involving rows l:n and columns m:n
for (int k = m; k <= n - 1; k++) {
boolean notlast = (k != n - 1);
if (k != m) {
p = H.get(k, k - 1);
q = H.get(k + 1, k - 1);
r = (notlast ? H.get(k + 2, k - 1) : 0.0);
x = Math.abs(p) + Math.abs(q) + Math.abs(r);
if (x == 0.0) {
continue;
}
p = p / x;
q = q / x;
r = r / x;
}
s = Math.sqrt(p * p + q * q + r * r);
if (p < 0) {
s = -s;
}
if (s != 0) {
if (k != m) {
H.set(k, k - 1, -s * x);
} else if (l != m) {
H.updateAt(k, k - 1, Matrices.INV_FUNCTION);
}
p = p + s;
x = p / s;
y = q / s;
z = r / s;
q = q / p;
r = r / p;
// Row modification
for (int j = k; j < nn; j++) {
p = H.get(k, j) + q * H.get(k + 1, j);
if (notlast) {
p = p + r * H.get(k + 2, j);
H.updateAt(k + 2, j,
Matrices.asMinusFunction(p * z));
}
H.updateAt(k, j, Matrices.asMinusFunction(p * x));
H.updateAt(k + 1, j, Matrices.asMinusFunction(p * y));
}
// Column modification
for (int i = 0; i <= Math.min(n, k + 3); i++) {
p = x * H.get(i, k) + y
* H.get(i, k + 1);
if (notlast) {
p = p + z * H.get(i, k + 2);
H.updateAt(i, k + 2,
Matrices.asMinusFunction(p * r));
}
H.updateAt(i, k, Matrices.asMinusFunction(p));
H.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
}
// Accumulate transformations
for (int i = low; i <= high; i++) {
p = x * V.get(i, k) + y
* V.get(i, k + 1);
if (notlast) {
p = p + z * V.get(i, k + 2);
V.updateAt(i, k + 2,
Matrices.asMinusFunction(p * r));
}
V.updateAt(i, k, Matrices.asMinusFunction(p));
V.updateAt(i, k + 1, Matrices.asMinusFunction(p * q));
}
} // (s != 0)
} // k loop
} // check convergence
} // while (n >= low)
// Backsubstitute to find vectors of upper triangular form
if (norm == 0.0) {
return;
}
for (n = nn - 1; n >= 0; n--) {
p = d.get(n);
q = e.get(n);
// Real vector
if (q == 0) {
int l = n;
H.set(n, n, 1.0);
for (int i = n - 1; i >= 0; i--) {
w = H.get(i, i) - p;
r = 0.0;
for (int j = l; j <= n; j++) {
r = r + H.get(i, j) * H.get(j, n);
}
if (e.get(i) < 0.0) {
z = w;
s = r;
} else {
l = i;
if (e.get(i) == 0.0) {
if (w != 0.0) {
H.set(i, n, -r / w);
} else {
H.set(i, n, -r / (eps * norm));
}
// Solve real equations
} else {
x = H.get(i, i + 1);
y = H.get(i + 1, i);
q = (d.get(i) - p) * (d.get(i) - p)
+ e.get(i) * e.get(i);
t = (x * s - z * r) / q;
H.set(i, n, t);
if (Math.abs(x) > Math.abs(z)) {
H.set(i + 1, n, (-r - w * t) / x);
} else {
H.set(i + 1, n, (-s - y * t) / z);
}
}
// Overflow control
t = Math.abs(H.get(i, n));
if ((eps * t) * t > 1) {
for (int j = i; j <= n; j++) {
H.updateAt(j, n, Matrices.asDivFunction(t));
}
}
}
}
// Complex vector
} else if (q < 0) {
int l = n - 1;
// Last vector component imaginary so matrix is triangular
if (Math.abs(H.get(n, n - 1))
> Math.abs(H.get(n - 1, n))) {
H.set(n - 1, n - 1, q / H.get(n, n - 1));
H.set(n - 1, n, -(H.get(n, n) - p)
/ H.get(n, n - 1));
} else {
double[] cdiv = cdiv(0.0, -H.get(n - 1, n),
H.get(n - 1, n - 1) - p, q);
H.set(n - 1, n - 1, cdiv[0]);
H.set(n - 1, n, cdiv[1]);
}
H.set(n, n - 1, 0.0);
H.set(n, n, 1.0);
for (int i = n - 2; i >= 0; i--) {
double ra;
double sa;
double vr;
double vi;
ra = 0.0;
sa = 0.0;
for (int j = l; j <= n; j++) {
ra = ra + H.get(i, j) * H.get(j, n - 1);
sa = sa + H.get(i, j) * H.get(j, n);
}
w = H.get(i, i) - p;
if (e.get(i) < 0.0) {
z = w;
r = ra;
s = sa;
} else {
l = i;
if (e.get(i) == 0) {
double[] cdiv = cdiv(-ra, -sa, w, q);
H.set(i, n - 1, cdiv[0]);
H.set(i, n, cdiv[1]);
} else {
// Solve complex equations
x = H.get(i, i + 1);
y = H.get(i + 1, i);
vr = (d.get(i) - p) * (d.get(i) - p)
+ e.get(i) * e.get(i) - q * q;
vi = (d.get(i) - p) * 2.0 * q;
                            if (vr == 0.0 && vi == 0.0) {
vr = eps
* norm
* (Math.abs(w) + Math.abs(q)
+ Math.abs(x) + Math.abs(y) + Math
.abs(z));
}
double[] cdiv = cdiv(x * r - z * ra + q * sa,
x * s - z * sa - q * ra, vr, vi);
H.set(i, n - 1, cdiv[0]);
H.set(i, n, cdiv[1]);
if (Math.abs(x) > (Math.abs(z) + Math.abs(q))) {
H.set(i + 1, n - 1, (-ra - w
* H.get(i, n - 1) + q
* H.get(i, n)) / x);
H.set(i + 1, n, (-sa - w
* H.get(i, n) - q
* H.get(i, n - 1)) / x);
} else {
cdiv = cdiv(-r - y
* H.get(i, n - 1), -s - y
* H.get(i, n), z, q);
H.set(i + 1, n - 1, cdiv[0]);
H.set(i + 1, n, cdiv[1]);
}
}
// Overflow control
t = Math.max(Math.abs(H.get(i, n - 1)),
Math.abs(H.get(i, n)));
if ((eps * t) * t > 1) {
for (int j = i; j <= n; j++) {
H.updateAt(j, n - 1, Matrices.asDivFunction(t));
H.updateAt(j, n, Matrices.asDivFunction(t));
}
}
}
}
}
}
// Vectors of isolated roots
for (int i = 0; i < nn; i++) {
            if (i < low || i > high) {
for (int j = i; j < nn; j++) {
V.set(i, j, H.get(i, j));
}
}
}
// Back transformation to get eigenvectors of original matrix
for (int j = nn - 1; j >= low; j--) {
for (int i = low; i <= high; i++) {
z = 0.0;
for (int k = low; k <= Math.min(j, high); k++) {
z = z + V.get(i, k) * H.get(k, j);
}
V.set(i, j, z);
}
}
}
private double[] cdiv(double xr, double xi, double yr, double yi) {
double cdivr;
double cdivi;
double r;
double d;
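        // Scaled complex division (xr + i*xi) / (yr + i*yi): dividing through
        // by the larger of |yr| and |yi| first (Smith's method) avoids
        // overflow and underflow in the intermediate products.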
if (Math.abs(yr) > Math.abs(yi)) {
r = yi / yr;
d = yr + r * yi;
cdivr = (xr + r * xi) / d;
cdivi = (xi - r * xr) / d;
} else {
r = yr / yi;
d = yi + r * yr;
cdivr = (r * xr + xi) / d;
cdivi = (r * xi - xr) / d;
}
return new double[] { cdivr, cdivi };
}
}<|fim▁end|> | for (int j = n - 1; j < nn; j++) {
z = H.get(n - 1, j);
H.set(n - 1, j, q * z + p * H.get(n, j)); |
<|file_name|>targetsegfault.py<|end_file_name|><|fim▁begin|>def getitem(list, index):
return list[index]
def entry_point(i):
return getitem([i, 2, 3, 4], 2) + getitem(None, i)
def target(*args):
return entry_point, [int]
def get_llinterp_args():
return [1]
# _____ Run translated _____
def run(c_entry_point):<|fim▁hole|><|fim▁end|> | c_entry_point(0) |
<|file_name|>powerball.py<|end_file_name|><|fim▁begin|>"""Author: Ben Johnstone"""
#TODO
# 1 Do argparse for main
# 3 Figure out what statistics should be reported
# 4 set up logger
# 5 winnings calculator???
# 6 Speed benchmarking??
# 7 Get drawings file from internet
# 8 Graph count of each ball
# Database

import argparse


def Main():
parser = argparse.ArgumentParser(description="")
parser.add_argument("--file", "-f", required=True,
help="Name of the file containing the historical powerball drawings")
parser.add_argument("--jackpot", "-j", help="Optional excel file containing the number of " \
+ "jackpot winners for each drawing")
parser.add_argument("--leastRecent", "-l", help="Display the white and red numbers in order " \
+ "from least to most recently drawn", action="store_true")
parser.add_argument("--mostCommon", "-m", help="Display the white and red numbers in order " \
+ "of commonality in the results from most to least common",
action="store_true")
args = parser.parse_args()<|fim▁hole|> if not (args.leastRecent or args.mostCommon):
print("Must use at least one of --leastRecent or --mostCommon")
return
drawings = ParseDrawingsFile(args.file)
if args.jackpot:
ParseJackpotFile(args.jackpot)
if args.leastRecent:
print("Least recent white balls:")
print(LeastRecentWhites(drawings))
print("Least recent red balls:")
print(LeastRecentReds(drawings))
if args.mostCommon:
print("Most common white balls:")
print(MostCommonWhites(drawings))
print("Most common red balls:")
print(MostCommonReds(drawings))
if __name__ == "__main__":
Main()<|fim▁end|> | |
<|file_name|>RegisterScreen.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import { connect } from 'react-redux';
import { View } from 'react-native';
import sb from 'react-native-style-block';
import { TButton, TFormGroup } from '../components/TComponent';
import { showLoading, showAlert, hideAlert } from '@shared/redux/actions/ui';
import { register } from '@shared/redux/actions/user';
import { StackScreenProps } from '@react-navigation/stack';
import { RootStackParamList } from '@app/router';
interface Props extends StackScreenProps<RootStackParamList, 'Register'> {
dispatch: any;
}
interface State {
username: string;
password: string;
passwordRepeat: string;
}
class RegisterScreen extends React.Component<Props, State> {
constructor(props) {
super(props);
this.state = {
username: '',
password: '',
passwordRepeat: '',
};
}
checkInputErr() {
const { username, password, passwordRepeat } = this.state;
if (!username) {
return '用户名不能为空';
}
if (!/^[A-Za-z\d]{5,16}$/.test(username)) {
return '用户名必须为5到16位英文或数字';
}
if (!password) {
return '密码不能为空';
}
if (!/^[A-Za-z\d]{5,16}$/.test(password)) {
return '密码必须为5到16位英文或数字';
}
if (password !== passwordRepeat) {
return '重复密码不一致';
}
return '';
}
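
  // checkInputErr returns a non-empty message for the first failing rule:
  // username and password must each be 5-16 ASCII letters or digits, and
  // the two password fields must match.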
handleRegister() {
const err = this.checkInputErr();
if (err) {
this.props.dispatch(
showAlert({
title: '格式错误',
content: err,
})
);
} else {
this.props.dispatch(showLoading());
const username = this.state.username;
const password = this.state.password;
this.props.dispatch(
register(username, password, () => {
this.props.dispatch(
showAlert({
content: '注册成功',
onConfirm: () => {
this.props.dispatch(hideAlert());
this.props.navigation.goBack();
},
})
);
})
);
}
}
render() {
return (
<View style={styles.container}>
<TFormGroup
label="用户名"
value={this.state.username}
onChangeText={(username) => this.setState({ username })}
input={{
placeholder: '请输入用户名',
}}
/>
<TFormGroup
label="密码"
value={this.state.password}
onChangeText={(password) => this.setState({ password })}
input={{
placeholder: '请输入密码',
secureTextEntry: true,
}}
/>
<TFormGroup
label="重复密码"
value={this.state.passwordRepeat}
onChangeText={(passwordRepeat) => this.setState({ passwordRepeat })}
input={{
placeholder: '请再次输入密码',
secureTextEntry: true,
}}<|fim▁hole|> />
<TButton onPress={() => this.handleRegister()}>成为祭品</TButton>
</View>
);
}
}
const styles = {
container: [
// sb.alignCenter(),
sb.flex(),
sb.padding(20, 20, 0),
],
};
export default connect()(RegisterScreen);<|fim▁end|> | |
<|file_name|>Converter.cc<|end_file_name|><|fim▁begin|>#include "Converter.h"
#include <TFormula.h>
#include <iomanip>
#include <sstream>
std::string Converter::doubleToString(double x,int precision,bool scientifiStyle)
{
std::stringstream xs;
if(scientifiStyle)
xs<<std::scientific;
else
xs<<std::fixed;
xs<<std::setprecision(precision)<<x;
return xs.str();
};
std::string Converter::intToString(int x)
{
return doubleToString(x,0);
};
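
// stringToDouble evaluates its argument as a ROOT TFormula expression, so
// strings such as "2*3.5" or "sqrt(2)" are computed rather than parsed as
// plain numeric literals.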
double Converter::stringToDouble(std::string formula)
{
TFormula myf("myf",formula.c_str());
return myf.Eval(0);
}
int Converter::stringToInt(std::string formula)
{
return (int)(stringToDouble(formula));<|fim▁hole|><|fim▁end|> | } |
<|file_name|>client.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package dynamic provides a client interface to arbitrary Kubernetes
// APIs that exposes common high level operations and exposes common
// metadata.
package dynamic
import (
"encoding/json"
"errors"
"io"
"net/url"
"strings"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/conversion/queryparams"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/client/restclient"
"k8s.io/kubernetes/pkg/util/flowcontrol"
)
// Client is a Kubernetes client that allows you to access metadata
// and manipulate metadata of a Kubernetes API group.
type Client struct {
cl *restclient.RESTClient
parameterCodec runtime.ParameterCodec
}
// NewClient returns a new client based on the passed in config. The
// codec is ignored, as the dynamic client uses its own codec.
func NewClient(conf *restclient.Config) (*Client, error) {
// avoid changing the original config
confCopy := *conf
conf = &confCopy
contentConfig := ContentConfig()
contentConfig.GroupVersion = conf.GroupVersion
if conf.NegotiatedSerializer != nil {
contentConfig.NegotiatedSerializer = conf.NegotiatedSerializer
}
conf.ContentConfig = contentConfig
if conf.APIPath == "" {
conf.APIPath = "/api"
}
if len(conf.UserAgent) == 0 {
conf.UserAgent = restclient.DefaultKubernetesUserAgent()
}
cl, err := restclient.RESTClientFor(conf)
if err != nil {
return nil, err
}
return &Client{cl: cl}, nil
}
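
// A minimal usage sketch (assuming a configured *restclient.Config named conf):
//
//	cl, err := NewClient(conf)
//	if err != nil {
//		// handle error
//	}
//	pods := cl.Resource(&metav1.APIResource{Name: "pods", Namespaced: true}, "default")
//	podList, err := pods.List(&v1.ListOptions{})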
// GetRateLimiter returns the rate limiter.
func (c *Client) GetRateLimiter() flowcontrol.RateLimiter {
return c.cl.GetRateLimiter()
}
// Resource returns an API interface to the specified resource for this client's
// group and version. If resource is not a namespaced resource, then namespace
// is ignored. The ResourceClient inherits the parameter codec of c.
func (c *Client) Resource(resource *metav1.APIResource, namespace string) *ResourceClient {
return &ResourceClient{
cl: c.cl,
resource: resource,
ns: namespace,
parameterCodec: c.parameterCodec,
}
}
// ParameterCodec returns a client with the provided parameter codec.
func (c *Client) ParameterCodec(parameterCodec runtime.ParameterCodec) *Client {
return &Client{
cl: c.cl,
parameterCodec: parameterCodec,
}
}
// ResourceClient is an API interface to a specific resource under a
// dynamic client.
type ResourceClient struct {
cl *restclient.RESTClient
resource *metav1.APIResource
ns string
parameterCodec runtime.ParameterCodec
}
// List returns a list of objects for this resource.
func (rc *ResourceClient) List(opts runtime.Object) (runtime.Object, error) {
parameterEncoder := rc.parameterCodec
if parameterEncoder == nil {
parameterEncoder = defaultParameterEncoder
}
return rc.cl.Get().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
VersionedParams(opts, parameterEncoder).
Do().
Get()
}
// Get gets the resource with the specified name.
func (rc *ResourceClient) Get(name string) (*unstructured.Unstructured, error) {
result := new(unstructured.Unstructured)
err := rc.cl.Get().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
Name(name).
Do().
Into(result)
return result, err
}
// Delete deletes the resource with the specified name.
func (rc *ResourceClient) Delete(name string, opts *v1.DeleteOptions) error {
return rc.cl.Delete().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
Name(name).
Body(opts).
Do().
Error()
}
// DeleteCollection deletes a collection of objects.
func (rc *ResourceClient) DeleteCollection(deleteOptions *v1.DeleteOptions, listOptions runtime.Object) error {
parameterEncoder := rc.parameterCodec
if parameterEncoder == nil {
parameterEncoder = defaultParameterEncoder
}
return rc.cl.Delete().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
VersionedParams(listOptions, parameterEncoder).
Body(deleteOptions).
Do().
Error()
}
// Create creates the provided resource.
func (rc *ResourceClient) Create(obj *unstructured.Unstructured) (*unstructured.Unstructured, error) {
result := new(unstructured.Unstructured)
err := rc.cl.Post().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
Body(obj).
Do().
Into(result)
return result, err
}
// Update updates the provided resource.
func (rc *ResourceClient) Update(obj *unstructured.Unstructured) (*unstructured.Unstructured, error) {
result := new(unstructured.Unstructured)
if len(obj.GetName()) == 0 {
return result, errors.New("object missing name")
}
err := rc.cl.Put().
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
Name(obj.GetName()).
Body(obj).
Do().
Into(result)
return result, err
}
// Watch returns a watch.Interface that watches the resource.
func (rc *ResourceClient) Watch(opts runtime.Object) (watch.Interface, error) {
parameterEncoder := rc.parameterCodec
if parameterEncoder == nil {
parameterEncoder = defaultParameterEncoder
}
return rc.cl.Get().
Prefix("watch").
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
VersionedParams(opts, parameterEncoder).
Watch()
}
func (rc *ResourceClient) Patch(name string, pt api.PatchType, data []byte) (*unstructured.Unstructured, error) {
result := new(unstructured.Unstructured)
err := rc.cl.Patch(pt).
NamespaceIfScoped(rc.ns, rc.resource.Namespaced).
Resource(rc.resource.Name).
Name(name).
Body(data).
Do().
Into(result)
return result, err
}
// dynamicCodec is a codec that wraps the standard unstructured codec
// with special handling for Status objects.
type dynamicCodec struct{}
func (dynamicCodec) Decode(data []byte, gvk *schema.GroupVersionKind, obj runtime.Object) (runtime.Object, *schema.GroupVersionKind, error) {
obj, gvk, err := unstructured.UnstructuredJSONScheme.Decode(data, gvk, obj)
if err != nil {
return nil, nil, err
}
if _, ok := obj.(*metav1.Status); !ok && strings.ToLower(gvk.Kind) == "status" {
obj = &metav1.Status{}
err := json.Unmarshal(data, obj)
if err != nil {
return nil, nil, err
}
}
return obj, gvk, nil
}
func (dynamicCodec) Encode(obj runtime.Object, w io.Writer) error {
return unstructured.UnstructuredJSONScheme.Encode(obj, w)
}
// ContentConfig returns a restclient.ContentConfig for dynamic types.
func ContentConfig() restclient.ContentConfig {
var jsonInfo runtime.SerializerInfo
// TODO: api.Codecs here should become "pkg/apis/server/scheme" which is the minimal core you need
// to talk to a kubernetes server
for _, info := range api.Codecs.SupportedMediaTypes() {
if info.MediaType == runtime.ContentTypeJSON {
jsonInfo = info
break
}
}
jsonInfo.Serializer = dynamicCodec{}
jsonInfo.PrettySerializer = nil
return restclient.ContentConfig{
AcceptContentTypes: runtime.ContentTypeJSON,
ContentType: runtime.ContentTypeJSON,
NegotiatedSerializer: serializer.NegotiatedSerializerWrapper(jsonInfo),
}
}
// parameterCodec is a codec that converts an API object to query
// parameters without trying to convert to the target version.
type parameterCodec struct{}
func (parameterCodec) EncodeParameters(obj runtime.Object, to schema.GroupVersion) (url.Values, error) {
return queryparams.Convert(obj)
}
func (parameterCodec) DecodeParameters(parameters url.Values, from schema.GroupVersion, into runtime.Object) error {
return errors.New("DecodeParameters not implemented on dynamic parameterCodec")
}
var defaultParameterEncoder runtime.ParameterCodec = parameterCodec{}
type versionedParameterEncoderWithV1Fallback struct{}
func (versionedParameterEncoderWithV1Fallback) EncodeParameters(obj runtime.Object, to schema.GroupVersion) (url.Values, error) {
ret, err := api.ParameterCodec.EncodeParameters(obj, to)
if err != nil && runtime.IsNotRegisteredError(err) {
// fallback to v1<|fim▁hole|> return ret, err
}
func (versionedParameterEncoderWithV1Fallback) DecodeParameters(parameters url.Values, from schema.GroupVersion, into runtime.Object) error {
return errors.New("DecodeParameters not implemented on versionedParameterEncoderWithV1Fallback")
}
// VersionedParameterEncoderWithV1Fallback is useful for encoding query
// parameters for thirdparty resources. It tries to convert object to the
// specified version before converting it to query parameters, and falls back to
// converting to v1 if the object is not registered in the specified version.
// For the record, currently API server always treats query parameters sent to a
// thirdparty resource endpoint as v1.
var VersionedParameterEncoderWithV1Fallback runtime.ParameterCodec = versionedParameterEncoderWithV1Fallback{}<|fim▁end|> | return api.ParameterCodec.EncodeParameters(obj, v1.SchemeGroupVersion)
} |
<|file_name|>native-menu-ref.ts<|end_file_name|><|fim▁begin|>import { NgZone } from '@angular/core';
import { Menu, MenuItem, MenuItemConstructorOptions, PopupOptions, remote } from 'electron';
import { Observable, Subject } from 'rxjs';
let uniqueId = 0;
interface MenuItemWithId extends MenuItem {
id?: string;
}
export class NativeMenuRef {
readonly id: string = `native-menu-${uniqueId++}`;
private readonly _menu: Menu;
private _clickedMenuItem: MenuItemWithId | null = null;
private readonly _afterClosed = new Subject<MenuItemWithId | null>();
constructor(
template: MenuItemConstructorOptions[],
options: PopupOptions,
private ngZone: NgZone,
) {
this._menu = remote.Menu.buildFromTemplate(
this._templateWithCallback(template),
);
options.callback = () => {
this.ngZone.run(() => {
this._afterClosed.next(this._clickedMenuItem);
});
};
this._menu.popup(options);
}
close(): void {
this._menu.closePopup();<|fim▁hole|> }
private _templateWithCallback(
template: MenuItemConstructorOptions[],
): MenuItemConstructorOptions[] {
const cloned = [...template];
cloned.forEach((item) => {
if (item.type === 'submenu' && item.submenu) {
item.submenu = this._templateWithCallback(
item.submenu as MenuItemConstructorOptions[],
);
}
const ref = this;
item.click = (menuItem) => {
ref._clickedMenuItem = menuItem;
};
});
return cloned;
}
}<|fim▁end|> | }
afterClosed(): Observable<MenuItemWithId | null> {
return this._afterClosed.asObservable(); |
<|file_name|>dataobject.py<|end_file_name|><|fim▁begin|># Copyright (c) 2009-2010 Six Apart Ltd.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Six Apart Ltd. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE<|fim▁hole|># INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
`DataObject` is a class of object that provides coding between object
attributes and dictionaries, suitable for moving objects through
dictionary-based representations such as those used by JSON APIs.

`DataObject` contains the mechanism for converting between dictionaries and
objects. These conversions are performed with the aid of `Field` instances
declared on `DataObject` subclasses. `Field` classes reside in the
`remoteobjects.fields` module.
"""
from copy import deepcopy
import logging
import remoteobjects.fields
classes_by_name = {}
classes_by_constant_field = {}
def find_by_name(name):
"""Finds and returns the DataObject subclass with the given name.
Parameter `name` should be a bare class name with no module. If there is
no class by that name, raises `KeyError`.
"""
return classes_by_name[name]
class DataObjectMetaclass(type):
"""Metaclass for `DataObject` classes.
This metaclass installs all `remoteobjects.fields.Property` instances
declared as attributes of the new class, including all `Field` and `Link`
instances.
This metaclass also makes the new class findable through the
`dataobject.find_by_name()` function.
"""
def __new__(cls, name, bases, attrs):
"""Creates and returns a new `DataObject` class with its declared
fields and name."""
fields = {}
new_fields = {}
new_properties = {}
# Inherit all the parent DataObject classes' fields.
for base in bases:
if isinstance(base, DataObjectMetaclass):
fields.update(base.fields)
# Move all the class's attributes that are Fields to the fields set.
for attrname, field in attrs.items():
if isinstance(field, remoteobjects.fields.Property):
new_properties[attrname] = field
if isinstance(field, remoteobjects.fields.Field):
new_fields[attrname] = field
elif attrname in fields:
# Throw out any parent fields that the subclass defined as
# something other than a Field.
del fields[attrname]
fields.update(new_fields)
attrs['fields'] = fields
obj_cls = super(DataObjectMetaclass, cls).__new__(cls, name, bases, attrs)
for field, value in new_properties.items():
obj_cls.add_to_class(field, value)
# Register the new class so Object fields can have forward-referenced it.
classes_by_name[name] = obj_cls
# Tell this class's fields what this class is, so they can find their
# forward references later.
for field in new_properties.values():
field.of_cls = obj_cls
return obj_cls
def add_to_class(cls, name, value):
try:
value.install(name, cls)
except (NotImplementedError, AttributeError):
setattr(cls, name, value)
class DataObject(object):
"""An object that can be decoded from or encoded as a dictionary.
DataObject subclasses should be declared with their different data
attributes defined as instances of fields from the `remoteobjects.fields`
module. For example:
>>> from remoteobjects import dataobject, fields
>>> class Asset(dataobject.DataObject):
... name = fields.Field()
... updated = fields.Datetime()
... author = fields.Object('Author')
...
A DataObject's fields then provide the coding between live DataObject
instances and dictionaries.
"""
__metaclass__ = DataObjectMetaclass
def __init__(self, **kwargs):
"""Initializes a new `DataObject` with the given field values."""
self.api_data = {}
self.__dict__.update(kwargs)
def __eq__(self, other):
"""Returns whether two `DataObject` instances are equivalent.
If the `DataObject` instances are of the same type and contain the
same data in all their fields, the objects are equivalent.
"""
if type(self) != type(other):
return False
for k, v in self.fields.iteritems():
if isinstance(v, remoteobjects.fields.Field):
if getattr(self, k) != getattr(other, k):
return False
return True
def __ne__(self, other):
"""Returns whether two `DataObject` instances are different.
`DataObject` instances are different if they are not equivalent as
determined through `__eq__()`.
"""
return not self == other
@classmethod
def statefields(cls):
return cls.fields.keys() + ['api_data']
def __getstate__(self):
return dict((k, self.__dict__[k]) for k in self.statefields()
if k in self.__dict__)
def get(self, attr, *args):
return getattr(self, attr, *args)
def __iter__(self):
for key in self.fields.keys():
yield key
def to_dict(self):
"""Encodes the DataObject to a dictionary."""
# Start with the last set of data we got from the API
data = deepcopy(self.api_data)
# Now replace the data with what's actually in our object
for field_name, field in self.fields.iteritems():
value = getattr(self, field.attrname, None)
if value is not None:
data[field.api_name] = field.encode(value)
else:
data[field.api_name] = None
# Now delete any fields that ended up being None
# since we should exclude them in the resulting dict.
for k in data.keys():
if data[k] is None:
del data[k]
return data
@classmethod
def from_dict(cls, data):
"""Decodes a dictionary into a new `DataObject` instance."""
self = cls()
self.update_from_dict(data)
return self
def update_from_dict(self, data):
"""Adds the content of a dictionary to this DataObject.
Parameter `data` is the dictionary from which to update the object.
Use this only when receiving newly updated or partial content for a
DataObject; that is, when the data is from the outside data source and
needs decoded through the object's fields. Data from "inside" your
application should be added to an object manually by setting the
object's attributes. Data that constitutes a new object should be
turned into another object with `from_dict()`.
"""
if not isinstance(data, dict):
raise TypeError
# Clear any local instance field data
for k in self.fields.iterkeys():
if k in self.__dict__:
del self.__dict__[k]
self.api_data = data
@classmethod
def subclass_with_constant_field(cls, fieldname, value):
"""Returns the closest subclass of this class that has a `Constant`
field with the given value.
Use this method in combination with the `fields.Constant` field class
to find the most appropriate subclass of `cls` based on a content
field. For example, if you have an ``Asset`` class, but want to
declare subclasses with special behavior based on the ``kind`` field
of the ``Asset`` instances, declare ``kind`` as a `Constant` field on
each subclass. Then when you want to create a new ``Asset`` instance
(as in ``Asset.from_dict()``), you can use this method to select a
more appropriate class to instantiate.
Parameters `fieldname` and `value` are the name and value of the
`Constant` field for which to search respectively.
If a subclass of `cls` has been declared with a `Constant` field of
the given name and value, it will be returned. If multiple subclasses
of `cls` declare a matching `Constant` field, one of the matching
subclasses will be returned, but which subclass is not defined.
"""
try:
clsname = classes_by_constant_field[fieldname][tuple(value)]
except KeyError:
# No matching classes, then.
pass
else:
return find_by_name(clsname)
raise ValueError('No such subclass of %s with field %r equivalent to %r'
% (cls.__name__, fieldname, value))<|fim▁end|> | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
<|file_name|>notification.model.ts<|end_file_name|><|fim▁begin|>import { Utilities } from "../services/utilities";
export class Notification {
public static Create(data: {}) {
let n = new Notification();
Object.assign(n, data);
if (n.date)
n.date = Utilities.parseDate(n.date);
return n;
}
public id: number;
public header: string;<|fim▁hole|> public date: Date;
}<|fim▁end|> | public body: string;
public isRead: boolean;
public isPinned: boolean; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf8
from django.views.generic import ListView, DetailView, CreateView
from django.db.models import Q
from django.http import JsonResponse, HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.shortcuts import render
from pure_pagination.mixins import PaginationMixin
from django.contrib.auth.mixins import LoginRequiredMixin
from django.conf import settings
from books.models import Publish, Author, Book
from books.forms import PublishForm
import json
import logging
logger = logging.getLogger('opsweb')
class PublishListView(LoginRequiredMixin, PaginationMixin, ListView):
'''
    Actions: getlist, create
'''
model = Publish
template_name = "books/publish_list.html"
context_object_name = "publish_list"
paginate_by = 5
keyword = ''<|fim▁hole|> if self.keyword:
queryset = queryset.filter(Q(name__icontains=self.keyword) |
Q(address__icontains=self.keyword) |
Q(city__icontains=self.keyword))
return queryset
def get_context_data(self, **kwargs):
context = super(PublishListView, self).get_context_data(**kwargs)
context['keyword'] = self.keyword
return context
def post(self, request):
form = PublishForm(request.POST)
if form.is_valid():
form.save()
res = {'code': 0, 'result': '添加出版商成功'}
else:
            # form.errors passes the validation failure details to the frontend as an object; the frontend renders it directly
res = {'code': 1, 'errmsg': form.errors}
print form.errors
return JsonResponse(res, safe=True)
class PublishDetailView(LoginRequiredMixin, DetailView):
'''
    Actions: getone, update, delete
'''
model = Publish
template_name = "books/publish_detail.html"
context_object_name = 'publish'
next_url = '/books/publishlist/'
def post(self, request, *args, **kwargs):
pk = kwargs.get('pk')
p = self.model.objects.get(pk=pk)
form = PublishForm(request.POST, instance=p)
if form.is_valid():
form.save()
res = {"code": 0, "result": "更新出版商成功", 'next_url': self.next_url}
else:
res = {"code": 1, "errmsg": form.errors, 'next_url': self.next_url}
return render(request, settings.JUMP_PAGE, res)
# return HttpResponseRedirect(reverse('books:publish_detail',args=[pk]))
def delete(self, request, *args, **kwargs):
pk = kwargs.get('pk')
        # Look up the books linked to this publisher: deletion is allowed only when no books reference it
try:
obj = self.model.objects.get(pk=pk)
if not obj.book_set.all():
self.model.objects.filter(pk=pk).delete()
res = {"code": 0, "result": "删除出版商成功"}
else:
res = {"code": 1, "errmsg": "该出版社有关联书籍,请联系管理员"}
except:
res = {"code": 1, "errmsg": "删除错误请联系管理员"}
return JsonResponse(res, safe=True)<|fim▁end|> |
def get_queryset(self):
queryset = super(PublishListView, self).get_queryset()
self.keyword = self.request.GET.get('keyword', '').strip() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># vim:ts=4:sts=4:sw=4:expandtab
import os
import satori.web.setup
def manage():
from django.core.management import execute_manager
import satori.web.settings as settings
# HACK
import django.core.management
old_fmm = django.core.management.find_management_module
def find_management_module(app_name):
if app_name == 'satori.web':
return os.path.join(os.path.dirname(__file__), 'management')
else:
return old_fmm(app_name)
django.core.management.find_management_module = find_management_module
# END OF HACK
<|fim▁hole|> execute_manager(settings)<|fim▁end|> | |
<|file_name|>pidPyart.py<|end_file_name|><|fim▁begin|># lots to do:
# __ native drawLines
# __ add native drawCurve method
# __ native rectangle/round rect method
# __ native drawEllipse
# __ native drawArc
# __ drawImage support (work on Pyart side of things)
from __future__ import print_function
import pyart
from rdkit.sping.pid import *
from rdkit.sping.PDF import pdfmetrics
import Fontmapping # helps by mapping pid font classes to Pyart font names
# note for now I'm just going to do the standard PDF fonts & forget the rest
class PyartCanvas(Canvas):
"note the default face is 'times' and is set in Fontmapping.py"
def __init__(self,size=(300,300),name='PyartCanvas.png'):
self._pycan = pyart.Canvas(size[0], size[1], dpi=72)
self.filename = name
Canvas.__init__(self, size, name)
# self.defaultFillColor = transparent
# now we need to setup our tracking of the defaults vs the current state
# see if the __setattr__ approach is any better than the _updateXX strategy
def __setattr__(self, name, value):
if name == 'defaultLineColor':
if value:
# print('setting defaultLineColor to %s, 0x%x' % (value, value.toHexRGB()))
if value != transparent:
self._pycan.gstate.stroke = value.toHexRGB()
self.__dict__[name] = value
elif name == 'defaultFillColor':
if value:
if value != transparent:
self._pycan.gstate.fill = value.toHexRGB()
self.__dict__[name] = value
elif name == 'defaultLineWidth' :
if value:
self._pycan.gstate.stroke_width = value
self.__dict__[name] = value
elif name == 'defaultFont':
if value:
self.__dict__[name] = value
self._setPyartFont(value)
else: # received None so set to default font face & size=12
self.__dict__[name] = Font(face='times')
self._setPyartFont(self.__dict__[name])
else:
self.__dict__[name] = value
## Private methods ##
def _protectArtState(self, bool):<|fim▁hole|> return bool
def _restoreArtState(self, bool):
if bool:
self._pycan.grestore()
def _setPyartFont(self, fontInstance):
# accounts for "None" option
# does not act on self.defaultFont at all
fontsize = fontInstance.size
self._pycan.gstate.font_size = fontsize
# map pid name for font to Pyart name
pyartname = Fontmapping.getPyartName(fontInstance)
self._pycan.gstate.setfont(pyartname)
# # # # #
### public PID Canvas methods ##
def clear(self):
pass
def flush(self):
pass
def save(self, file=None, format=None):
# fileobj = getFileObject(file)
if not file:
file = self.filename
if isinstance(file, StringType):
self._pycan.save(file)
else:
raise NotImplementedError
def _findExternalFontName(self, font): #copied from piddlePDF by cwl- hack away!
"""Attempts to return proper font name.
PDF uses a standard 14 fonts referred to
by name. Default to self.defaultFont('Helvetica').
The dictionary allows a layer of indirection to
support a standard set of PIDDLE font names."""
piddle_font_map = {
'Times':'Times',
'times':'Times',
'Courier':'Courier',
'courier':'Courier',
'helvetica':'Helvetica',
'Helvetica':'Helvetica',
'symbol':'Symbol',
'Symbol':'Symbol',
'monospaced':'Courier',
'serif':'Times',
'sansserif':'Helvetica',
'ZapfDingbats':'ZapfDingbats',
'zapfdingbats':'ZapfDingbats',
'arial':'Helvetica'
}
try:
face = piddle_font_map[string.lower(font.face)]
except:
return 'Helvetica'
name = face + '-'
if font.bold and face in ['Courier','Helvetica','Times']:
name = name + 'Bold'
if font.italic and face in ['Courier', 'Helvetica']:
name = name + 'Oblique'
elif font.italic and face == 'Times':
name = name + 'Italic'
if name == 'Times-':
name = name + 'Roman'
# symbol and ZapfDingbats cannot be modified!
#trim and return
if name[-1] == '-':
name = name[0:-1]
return name
def stringWidth(self, s, font=None):
if not font:
font = self.defaultFont
fontname = Fontmapping.getPdfName(font)
return pdfmetrics.stringwidth(s, fontname) * font.size * 0.001
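
    # pdfmetrics tables are expressed in thousandths of a point for a 1 pt
    # font, hence the "* font.size * 0.001" scaling in these metric helpers.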
def fontAscent(self, font=None):
if not font:
font = self.defaultFont
fontname = Fontmapping.getPdfName(font)
return pdfmetrics.ascent_descent[fontname][0] * 0.001 * font.size
def fontDescent(self, font=None):
if not font:
font = self.defaultFont
fontname = Fontmapping.getPdfName(font)
return -pdfmetrics.ascent_descent[fontname][1] * 0.001 * font.size
def drawLine(self, x1, y1, x2, y2, color=None, width=None):
## standard code ##
color = color or self.defaultLineColor
width = width or self.defaultLineWidth
if color != transparent:
changed = self._protectArtState( (color != self.defaultLineColor) or
(width != self.defaultLineWidth) )
if color != self.defaultLineColor:
self._pycan.gstate.stroke = color.toHexRGB()
# print("color is %s <-> %s" % (color, color.toHexStr()))
if width != self.defaultLineWidth:
self._pycan.gstate.stroke_width = width
###################
# actual drawing
p = pyart.VectorPath(3)
p.moveto_open(x1,y1)
p.lineto(x2,y2)
self._pycan.stroke(p)
## standard code ##
if changed:
self._pycan.grestore()
###################
# def drawLines(self, lineList, color=None, width=None):
# pass
def drawString(self, s, x, y, font=None, color=None, angle=0):
# start w/ the basics
self._pycan.drawString(x,y, s)
def drawPolygon(self, pointlist,
edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
eColor = edgeColor or self.defaultLineColor
fColor = fillColor or self.defaultFillColor
eWidth = edgeWidth or self.defaultLineWidth
changed = self._protectArtState( (eColor != self.defaultLineColor) or
(eWidth != self.defaultLineWidth) or
(fColor != self.defaultFillColor) )
if eColor != self.defaultLineColor:
self._pycan.gstate.stroke = eColor.toHexRGB()
if fColor != self.defaultFillColor:
self._pycan.gstate.fill = fColor.toHexRGB()
if eWidth != self.defaultLineWidth:
self._pycan.gstate.stroke_width = eWidth
path = pyart.VectorPath(len(pointlist)+1)
if closed:
path.moveto_closed(pointlist[0][0], pointlist[0][1])
else:
path.moveto_open(pointlist[0][0], pointlist[0][1])
for pt in pointlist[1:]:
path.lineto(pt[0],pt[1])
if closed:
path.close()
if fColor != transparent and closed:
self._pycan.fill(path)
if eColor != transparent:
self._pycan.stroke(path)
self._restoreArtState(changed)
#def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4,
# edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
# pass
# def drawRoundRect(self, x1,y1, x2,y2, rx=8, ry=8,
# edgeColor=None, edgeWidth=None, fillColor=None):
# pass
# def drawEllipse(self, x1,y1, x2,y2, edgeColor=None, edgeWidth=None,
# fillColor=None):
# pass
# def drawArc(self, x1,y1, x2,y2, startAng=0, extent=360, edgeColor=None,
# edgeWidth=None, fillColor=None):
# pass
# def drawFigure(self, partList,
# edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
# pass
# def drawImage(self, image, x1, y1, x2=None,y2=None):
# pass
## basic tests ##
if __name__=='__main__':
import rdkit.sping.tests.pidtest
can = PyartCanvas(size=(300,300), name='basictest.png')
#can.defaultLineColor = Color(0.7, 0.7, 1.0)
#can.drawLine(10,10, 290,290)
#can.drawLine(10,10, 50, 10, color=green, width = 4.5)
rdkit.sping.tests.pidtest.drawBasics(can)
can.save(file='basicTest.png')
print('saving basicTest.png')
can = PyartCanvas(size=(400,400), name='test-strings.png')
rdkit.sping.tests.pidtest.drawStrings(can)
can.save()<|fim▁end|> | if bool:
self._pycan.gsave() |
<|file_name|>MD-MuellerBrown.py<|end_file_name|><|fim▁begin|>from MuellerBrown import getPotentialAndForces
from PlotUtils import PlotUtils
import numpy as np
import matplotlib.pyplot as plt
import MuellerBrown as mbpot
<|fim▁hole|>def getKineticEnergy(velocity):
return 0.5*m*(velocity[0]**2+velocity[1]**2)
dt = 0.01
num_steps = 1000
#initial_position = np.array( [ 0.0 , 0.0 ] )
initial_position = mbpot.saddlePoints[0]
initial_velocity = np.array( [ 1.0 , -1.0 ] )
position = np.zeros([num_steps+1,2])
velocity = np.zeros([num_steps+1,2])
potential_energy = np.zeros(num_steps+1)
kinetic_energy = np.zeros(num_steps+1)
total_energy = np.zeros(num_steps+1)
times = np.arange(num_steps+1)*dt
time = 0.0
position[0,:] = initial_position
velocity[0,:] = initial_velocity
kinetic_energy[0] = getKineticEnergy(initial_velocity)
(pot, force) = getPotentialAndForces(initial_position)
potential_energy[0] = pot
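# Velocity Verlet integration: advance the positions with the current
# force, evaluate the force at the new positions, then update the
# velocities with the average of the old and new forces. This scheme
# keeps the total energy approximately conserved.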
for i in range(0,num_steps):
# get position at t+dt
position[i+1] = position[i] + velocity[i]*dt+0.5*(force/m)*dt**2
# get velocity at t+dt
(new_pot, new_force) = getPotentialAndForces(position[i+1])
velocity[i+1] = velocity[i] + (0.5/m)*(new_force+force)*dt
    # record the energies and carry the new force into the next step
kinetic_energy[i+1] = getKineticEnergy(velocity[i+1])
potential_energy[i+1] = new_pot
force = new_force
total_energy = potential_energy + kinetic_energy
pu = PlotUtils(mbpot,[200,200])
pu.plotPotential(trajectory=position)
plt.figure(1)
plt.plot(times,position[:,0])
plt.figure(2)
plt.plot(times,position[:,1])
plt.figure(3)
plt.plot(times,potential_energy)
plt.figure(4)
plt.plot(times,kinetic_energy)
plt.figure(5)
plt.ylim(0, np.max(total_energy)+1.0)
plt.plot(times,total_energy)
plt.show()<|fim▁end|> | m=1.0
|
<|file_name|>IProjectVariables.java<|end_file_name|><|fim▁begin|>package flow;
/**
* This interface is used to define the name of variables that are
* declared in the call flow. All variables are defined as
* <code>public static final String</code>, which allows user-defined
* code to reference variable names by the Java variable name.
* Last generated by Orchestration Designer at: 2013-FEB-03 08:27:04 PM
*/
public interface IProjectVariables {
//{{START:PROJECT:VARIABLES
/**
* This is a reserved block of variable name definitions.
* The variable names defined here can be used as the key
* to get the <code>com.avaya.sce.runtime.Variable</code>
* from the <code>SCESession</code> at runtime.<br>
*
* For example, given a variable name <code>phoneNum</code>,
* user-defined code should access the variable in this format:<PRE>
* Variable phNum = mySession.getVariable(IProjectVariables.PHONE_NUM);
* if ( phNum != null ) {
* // do something with the variable
* }</PRE>
*
* This block of code is generated automatically by Orchestration Designer and should not
* be manually edited as changes may be overwritten by future code generation.
* Last generated by Orchestration Designer at: 2013-JUN-19 09:09:36 PM
*/
public static final String MAIN_MENU = "MainMenu";
public static final String BLIND_TRANSFER = "BlindTransfer";
public static final String TIME = "time";
public static final String REDIRECTINFO = "redirectinfo";
public static final String MAIN_MENU_TEXT = "MainMenuText";
public static final String SESSION = "session";
public static final String DD_LAST_EXCEPTION = "ddLastException";
public static final String DATE = "date";
public static final String SHAREDUUI = "shareduui";
//}}END:PROJECT:VARIABLES
//{{START:PROJECT:VARIABLEFIELDS
public static final String MAIN_MENU_FIELD_COLUMN_0 = "Column0";
public static final String MAIN_MENU_FIELD_CONFIDENCE = "confidence";
public static final String MAIN_MENU_FIELD_INPUTMODE = "inputmode";
public static final String MAIN_MENU_FIELD_INTERPRETATION = "interpretation";
public static final String MAIN_MENU_FIELD_NOINPUTCOUNT = "noinputcount";
public static final String MAIN_MENU_FIELD_NOMATCHCOUNT = "nomatchcount";
public static final String MAIN_MENU_FIELD_UTTERANCE = "utterance";
public static final String MAIN_MENU_FIELD_VALUE = "value";
public static final String TIME_FIELD_AUDIO = "audio";
public static final String TIME_FIELD_HOUR = "hour";
public static final String TIME_FIELD_MILLISECOND = "millisecond";
public static final String TIME_FIELD_MINUTE = "minute";
public static final String TIME_FIELD_SECOND = "second";
public static final String TIME_FIELD_TIMEZONE = "timezone";
public static final String REDIRECTINFO_FIELD_PRESENTATIONINFO = "presentationinfo";
public static final String REDIRECTINFO_FIELD_REASON = "reason";
public static final String REDIRECTINFO_FIELD_SCREENINGINFO = "screeninginfo";
public static final String REDIRECTINFO_FIELD_URI = "uri";
public static final String SESSION_FIELD_AAI = "aai";
public static final String SESSION_FIELD_ANI = "ani";
public static final String SESSION_FIELD_CALLTAG = "calltag";
public static final String SESSION_FIELD_CHANNEL = "channel";
public static final String SESSION_FIELD_CONVERSEFIRST = "conversefirst";
public static final String SESSION_FIELD_CONVERSESECOND = "conversesecond";
public static final String SESSION_FIELD_CURRENTLANGUAGE = "currentlanguage";
public static final String SESSION_FIELD_DNIS = "dnis";
public static final String SESSION_FIELD_EXIT_CUSTOMER_ID = "exitCustomerId";
public static final String SESSION_FIELD_EXIT_INFO_1 = "exitInfo1";
public static final String SESSION_FIELD_EXIT_INFO_2 = "exitInfo2";
public static final String SESSION_FIELD_EXIT_PREFERRED_PATH = "exitPreferredPath";
public static final String SESSION_FIELD_EXIT_REASON = "exitReason";
public static final String SESSION_FIELD_EXIT_TOPIC = "exitTopic";
public static final String SESSION_FIELD_LASTERROR = "lasterror";
public static final String SESSION_FIELD_MEDIATYPE = "mediatype";
public static final String SESSION_FIELD_MESSAGE_TYPE = "messageType";
public static final String SESSION_FIELD_PROTOCOLNAME = "protocolname";
public static final String SESSION_FIELD_PROTOCOLVERSION = "protocolversion";
public static final String SESSION_FIELD_SESSIONID = "sessionid";
public static final String SESSION_FIELD_SESSIONLABEL = "sessionlabel";
public static final String SESSION_FIELD_SHAREDMODE = "sharedmode";
public static final String SESSION_FIELD_UCID = "ucid";
public static final String SESSION_FIELD_UUI = "uui";
public static final String SESSION_FIELD_VIDEOBITRATE = "videobitrate";
public static final String SESSION_FIELD_VIDEOCODEC = "videocodec";
public static final String SESSION_FIELD_VIDEOENABLED = "videoenabled";<|fim▁hole|> public static final String SESSION_FIELD_VIDEONEARFMTP = "videonearfmtp";
public static final String SESSION_FIELD_VIDEOWIDTH = "videowidth";
public static final String SESSION_FIELD_VPCALLEDEXTENSION = "vpcalledextension";
public static final String SESSION_FIELD_VPCONVERSEONDATA = "vpconverseondata";
public static final String SESSION_FIELD_VPCOVERAGEREASON = "vpcoveragereason";
public static final String SESSION_FIELD_VPCOVERAGETYPE = "vpcoveragetype";
public static final String SESSION_FIELD_VPRDNIS = "vprdnis";
public static final String SESSION_FIELD_VPREPORTURL = "vpreporturl";
public static final String DD_LAST_EXCEPTION_FIELD_ERRORCODE = "errorcode";
public static final String DD_LAST_EXCEPTION_FIELD_MESSAGE = "message";
public static final String DD_LAST_EXCEPTION_FIELD_OBJECT = "object";
public static final String DD_LAST_EXCEPTION_FIELD_STACKTRACE = "stacktrace";
public static final String DD_LAST_EXCEPTION_FIELD_TYPE = "type";
public static final String DATE_FIELD_AUDIO = "audio";
public static final String DATE_FIELD_DAYOFMONTH = "dayofmonth";
public static final String DATE_FIELD_DAYOFWEEK = "dayofweek";
public static final String DATE_FIELD_DAYOFWEEKNUM = "dayofweeknum";
public static final String DATE_FIELD_DAYOFYEAR = "dayofyear";
public static final String DATE_FIELD_MONTH = "month";
public static final String DATE_FIELD_MONTHINYEAR = "monthinyear";
public static final String DATE_FIELD_YEAR = "year";
public static final String SHAREDUUI_FIELD_ID = "id";
public static final String SHAREDUUI_FIELD_VALUE = "value";
//}}END:PROJECT:VARIABLEFIELDS
}<|fim▁end|> | public static final String SESSION_FIELD_VIDEOFARFMTP = "videofarfmtp";
public static final String SESSION_FIELD_VIDEOFORMAT = "videoformat";
public static final String SESSION_FIELD_VIDEOFPS = "videofps";
public static final String SESSION_FIELD_VIDEOHEIGHT = "videoheight"; |
<|file_name|>listing4.02.js<|end_file_name|><|fim▁begin|>/* Get Programming with JavaScript
* Listing 4.02
* Displaying information from similar objects
*/
var movie1;
var movie2;
var movie3;
movie1 = {
title: "Inside Out",
actors: "Amy Poehler, Bill Hader",
directors: "Pete Doctor, Ronaldo Del Carmen"
};
movie2 = {
title: "Spectre",
actors: "Daniel Craig, Christoph Waltz",
directors: "Sam Mendes"
};
movie3 = {
title: "Star Wars: Episode VII - The Force Awakens",
actors: "Harrison Ford, Mark Hamill, Carrie Fisher",
directors: "J.J.Abrams"
};
console.log("Movie information for " + movie1.title);
console.log("------------------------------");
console.log("Actors: " + movie1.actors);
console.log("Directors: " + movie1.directors);
console.log("------------------------------");
console.log("Movie information for " + movie2.title);
console.log("------------------------------");
console.log("Actors: " + movie2.actors);
console.log("Directors: " + movie2.directors);
console.log("------------------------------");
console.log("Movie information for " + movie3.title);
console.log("------------------------------");<|fim▁hole|>console.log("Directors: " + movie3.directors);
console.log("------------------------------");
/* Further Adventures
*
* 1) Add a fourth movie and display its info
*
* 2) All the movie info is in one big block on the console.
* Change the code to space out the different movies.
*
* 3) Create objects to represent three calendar events
*
* 4) Display info from the three events on the console.
*
*/<|fim▁end|> | console.log("Actors: " + movie3.actors); |
<|file_name|>238_productArrayExceptSelf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Given an array of n integers where n > 1, nums, return an array output such that output[i] is equal to the product of all the elements of nums except nums[i].
class Solution(object):
# @param {integer[]} nums
# @return {integer[]}
def productExceptSelf(self, nums):
p = 1
n = len(nums)
output = []
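        # Two-pass prefix/suffix product technique: the first pass stores
        # the product of everything left of i in output[i]; the second pass
        # multiplies in the product of everything right of i. O(n) time,
        # and no division needed.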
# Forward range.
for i in range(0,n):<|fim▁hole|> p = p * nums[i]
p = 1
        # Backward range.
for i in range(n-1,-1,-1):
output[i] = output[i] * p
p = p * nums[i]
return output
nums=[2,3,4,5]
obj = Solution()
output = obj.productExceptSelf(nums)
print(output)<|fim▁end|> | output.append(p) |
<|file_name|>XMLXMLCOSTCATEGORYID.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* HELIUM V, Open Source ERP software for sustained success
* at small and medium-sized enterprises.
* Copyright (C) 2004 - 2015 HELIUM V IT-Solutions GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of theLicense, or
* (at your option) any later version.
*
* According to sec. 7 of the GNU Affero General Public License, version 3,
* the terms of the AGPL are supplemented with the following terms:
*
* "HELIUM V" and "HELIUM 5" are registered trademarks of
* HELIUM V IT-Solutions GmbH. The licensing of the program under the
* AGPL does not imply a trademark license. Therefore any rights, title and
* interest in our trademarks remain entirely with us. If you want to propagate
* modified versions of the Program under the name "HELIUM V" or "HELIUM 5",
* you may only do so if you have a written permission by HELIUM V IT-Solutions
* GmbH (to acquire a permission please contact HELIUM V IT-Solutions
* at [email protected]).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: [email protected]
******************************************************************************/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.12.03 at 10:12:07 AM MEZ
//
package com.lp.server.schema.opentrans.cc.orderresponse;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for _XML_XML_COST_CATEGORY_ID complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="_XML_XML_COST_CATEGORY_ID">
* <simpleContent>
* <extension base="<http://www.opentrans.org/XMLSchema/1.0>typeCOST_CATEGORY_ID">
* <attGroup ref="{http://www.opentrans.org/XMLSchema/1.0}ComIbmMrmNamespaceInfo154"/>
* <attribute name="type">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <minLength value="1"/>
* <maxLength value="32"/>
* <enumeration value="cost_center"/>
* <enumeration value="project"/>
* </restriction>
* </simpleType>
* </attribute>
* </extension>
* </simpleContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "_XML_XML_COST_CATEGORY_ID", propOrder = {
"value"
})
public class XMLXMLCOSTCATEGORYID {
@XmlValue
protected String value;
@XmlAttribute
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
protected String type;
@XmlAttribute(name = "xsi_schemaLocation")
protected String xsiSchemaLocation;
@XmlAttribute(name = "xmlns_xsd")
protected String xmlnsXsd;
/**
* Gets the value of the value property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getValue() {
return value;
}
/**
* Sets the value of the value property.
*
* @param value
* allowed object is
* {@link String }
* <|fim▁hole|> public void setValue(String value) {
this.value = value;
}
/**
* Gets the value of the type property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getType() {
return type;
}
/**
* Sets the value of the type property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setType(String value) {
this.type = value;
}
/**
* Gets the value of the xsiSchemaLocation property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getXsiSchemaLocation() {
if (xsiSchemaLocation == null) {
return "openbase_1_0.mxsd";
} else {
return xsiSchemaLocation;
}
}
/**
* Sets the value of the xsiSchemaLocation property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setXsiSchemaLocation(String value) {
this.xsiSchemaLocation = value;
}
/**
* Gets the value of the xmlnsXsd property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getXmlnsXsd() {
if (xmlnsXsd == null) {
return "http://www.w3.org/2001/XMLSchema";
} else {
return xmlnsXsd;
}
}
/**
* Sets the value of the xmlnsXsd property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setXmlnsXsd(String value) {
this.xmlnsXsd = value;
}
}<|fim▁end|> | */ |
<|file_name|>RequiredRowOrdering.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Splice Machine.
* Splice Machine is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either
* version 3, or (at your option) any later version.
* Splice Machine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License along with Splice Machine.
* If not, see <http://www.gnu.org/licenses/>.
*
* Some parts of this source code are based on Apache Derby, and the following notices apply to
* Apache Derby:
*
* Apache Derby is a subproject of the Apache DB project, and is licensed under
* the Apache License, Version 2.0 (the "License"); you may not use these files
* except in compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Splice Machine, Inc. has modified the Apache Derby code in this file.
*
* All such Splice Machine modifications are Copyright 2012 - 2020 Splice Machine, Inc.,
* and are licensed to you under the GNU Affero General Public License.
*/
package com.splicemachine.db.iapi.sql.compile;
import com.splicemachine.db.iapi.error.StandardException;
import com.splicemachine.db.iapi.util.JBitSet;
/**
* This interface provides a representation of the required ordering of rows
* from a ResultSet. Different operations can require ordering: ORDER BY,
* DISTINCT, GROUP BY. Some operations, like ORDER BY, require that the
* columns be ordered a particular way, while others, like DISTINCT and
 * GROUP BY, require only that there be no duplicates in the result.
*/
public interface RequiredRowOrdering
{
int SORT_REQUIRED = 1;
int ELIMINATE_DUPS = 2;
int NOTHING_REQUIRED = 3;
/**
* Tell whether sorting is required for this RequiredRowOrdering,
* given a RowOrdering.
*
* @param rowOrdering The order of rows in question
* @param optimizableList The current join order being considered by
* the optimizer. We need to look into this to determine if the outer
* optimizables are single row resultset if the order by column is
* on an inner optimizable and that inner optimizable is not a one
* row resultset. DERBY-3926
*
* @return SORT_REQUIRED if sorting is required,
* ELIMINATE_DUPS if no sorting is required but duplicates
* must be eliminated (i.e. the rows are in
* the right order but there may be duplicates),
	 *			NOTHING_REQUIRED if no operation is required
*
* @exception StandardException Thrown on error
*/
int sortRequired(RowOrdering rowOrdering, OptimizableList optimizableList) throws StandardException;
/**
* Tell whether sorting is required for this RequiredRowOrdering,
* given a RowOrdering representing a partial join order, and
* a bit map telling what tables are represented in the join order.
* This is useful for reducing the number of cases the optimizer
* has to consider.
*
* @param rowOrdering The order of rows in the partial join order
* @param tableMap A bit map of the tables in the partial join order
* @param optimizableList The current join order being considered by
* the optimizer. We need to look into this to determine if the outer
* optimizables are single row resultset if the order by column is
* on an inner optimizable and that inner optimizable is not a one
* row resultset. DERBY-3926<|fim▁hole|> * ELIMINATE_DUPS if no sorting is required by duplicates
* must be eliminated (i.e. the rows are in
* the right order but there may be duplicates),
	 *			NOTHING_REQUIRED if no operation is required
*
* @exception StandardException Thrown on error
*/
int sortRequired(RowOrdering rowOrdering, JBitSet tableMap, OptimizableList optimizableList) throws StandardException;
/**
* Estimate the cost of doing a sort for this row ordering, given
* the number of rows to be sorted. This does not take into account
* whether the sort is really needed. It also estimates the number of
* result rows.
*
* @param rowOrdering The ordering of the input rows
*
* @exception StandardException Thrown on error
*/
void estimateCost(Optimizer optimizer,
RowOrdering rowOrdering,
CostEstimate baseCost,
CostEstimate sortCost)
throws StandardException;
/**
* Indicate that a sort is necessary to fulfill this required ordering.
* This method may be called many times during a single optimization.
*/
void sortNeeded();
/**
* Indicate that a sort is *NOT* necessary to fulfill this required
* ordering. This method may be called many times during a single
* optimization.
*/
void sortNotNeeded();
/**
* @return Whether or not a sort is needed.
*/
boolean isSortNeeded();
}<|fim▁end|> | *
* @return SORT_REQUIRED if sorting is required, |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* @authors JayChenFE
* @date 2017-03-21 11:17:13
* @version $1.0$
*/
/**
 * Main entry point for the core logic
**/
const fs = require('fs');
const path = require('path');
const staticServer = require('./static-server');
class App {
constructor() {
}
initServer() {
        //makes it easy to add other logic here later
        //returns a handler function
return (request, response) => {<|fim▁hole|> let {
url
} = request;
let body = staticServer(url);
response.writeHead(200, 'resolve ok', {
'X-Powered-By': 'Node.js'
});
response.end(body);
// const staticPrefix = path.resolve(process.cwd(), 'public');
// let staticFun = url => {
// if (url == '/') {
// url = '/index.html';
// }
// let _path = getPath(url);
// // fs.readFile(_path, 'binary', (error, data) => {
// // if (error) {
// // data="NOT FOUND";
// // }
// // response.end(data,'binary');
// // });
// fs.readFile(_path, (error, data) => {
// if (error) {
// data = `NOT FOUND ${error.stack}`;
// }
// response.end(data);
// });
// };
// staticFun(url);
// if (url == '/css/index.css') {
// fs.readFile('./public/css/index.css', 'utf-8', (error, data) => {
// response.end(data);
// });
// }
// if (url == '/js/index.js') {
// fs.readFile('./public/js/index.js', 'utf-8', (error, data) => {
// response.end(data);
// });
// }
// if (url == '/') {
// //第一个路径相对的是process.cwd();
// fs.readFile('./public/index.html', 'utf-8', (error, data) => {
// response.end(data);
// });
// }
};
}
}
module.exports = App;<|fim▁end|> | |
<|file_name|>BlueBanana.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
BlueBanana Rat Config Decoder
'''
__description__ = 'BlueBanana Rat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.1'
__date__ = '2014/04/10'
#Standard Imports Go Here
import os
import sys
import string
from zipfile import ZipFile
from cStringIO import StringIO
from optparse import OptionParser
#Non Standard Imports
try:
from Crypto.Cipher import AES
except ImportError:
print "[+] Couldn't Import Cipher, try 'sudo pip install pycrypto'"
# Main Decode Function Goes Here
'''
data is a read of the file
Must return a python dict of values<|fim▁hole|> newZip = StringIO(data)
with ZipFile(newZip) as zip:
for name in zip.namelist(): # get all the file names
if name == "config.txt": # this file contains the encrypted config
conFile = zip.read(name)
                if conFile:
confRaw = decryptConf(conFile)
conf = configParse(confRaw)
return conf
#Helper Functions Go Here
def DecryptAES(enckey, data):
cipher = AES.new(enckey) # set the cipher
    return cipher.decrypt(data) # decrypt the data
def decryptConf(conFile):
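    # The config is wrapped in two layers of hex-encoded AES: the outer
    # layer is decrypted with key1, a 16-byte tail is stripped, and the
    # remainder is hex-decoded and decrypted again with key2.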
key1 = "15af8sd4s1c5s511"
key2 = "4e3f5a4c592b243f"
first = DecryptAES(key1, conFile.decode('hex'))
second = DecryptAES(key2, first[:-16].decode('hex'))
return second
def configParse(confRaw):
config = {}
clean = filter(lambda x: x in string.printable, confRaw)
list = clean.split("<separator>")
config["Domain"] = list[0]
config["Password"] = list[1]
config["Port1"] = list[2]
config["Port2"] = list[3]
if len(list) > 4:
config["Install Name"] = list[4]
config["Jar Name"] = list[5]
return config
#Recursive Function Goes Here
# Main
if __name__ == "__main__":
parser = OptionParser(usage='usage: %prog inFile outConfig\n' + __description__, version='%prog ' + __version__)
parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode")
(options, args) = parser.parse_args()
    # If we don't have args, quit with help page
if len(args) > 0:
pass
else:
parser.print_help()
sys.exit()
# if we want a recursive extract run this function
if options.recursive == True:
print "[+] Sorry Not Here Yet Come Back Soon"
    #If not recursive try to open file
try:
print "[+] Reading file"
fileData = open(args[0], 'rb').read()
except:
print "[+] Couldn't Open File {0}".format(args[0])
#Run the config extraction
print "[+] Searching for Config"
config = run(fileData)
#If we have a config figure out where to dump it out.
if config == None:
print "[+] Config not found"
sys.exit()
#if you gave me two args im going to assume the 2nd arg is where you want to save the file
if len(args) == 2:
print "[+] Writing Config to file {0}".format(args[1])
with open(args[1], 'a') as outFile:
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
outFile.write("Key: {0}\t Value: {1}\n".format(key,clean_value))
    # if no second arg then assume you want it printed to the screen
else:
print "[+] Printing Config to screen"
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
print " [-] Key: {0}\t Value: {1}".format(key,clean_value)
print "[+] End of Config"<|fim▁end|> | '''
def run(data): |
<|file_name|>conanfile.py<|end_file_name|><|fim▁begin|>from conans import ConanFile, AutoToolsBuildEnvironment, tools
import os
class LibHaruConn(ConanFile):
name = "libharu"
version = "2.3.0"
license = "ZLIB https://github.com/libharu/libharu/blob/master/LICENCE"
url = "https://github.com/trigger-happy/conan-packages"
description = "C library for generating PDF documents"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
generators = "cmake"<|fim▁hole|> self.run("tar xf libharu-RELEASE_2_3_0.tar.gz")
def build(self):
env_build = AutoToolsBuildEnvironment(self)
install_prefix=os.getcwd()
with tools.chdir("libharu-RELEASE_2_3_0"):
with tools.environment_append(env_build.vars):
self.run("touch include/config.h.in")
self.run("aclocal")
self.run("libtoolize")
self.run("automake --add-missing")
self.run("autoconf")
self.run("./configure --prefix={0}".format(install_prefix))
self.run("make install")
def package(self):
self.copy("lib/*", dst="lib", keep_path=False)
self.copy("include/*", dst=".", keep_path=True)
def package_info(self):
self.cpp_info.libs = ["hpdf"]<|fim▁end|> |
def source(self):
pkgLink = 'https://github.com/libharu/libharu/archive/RELEASE_2_3_0.tar.gz'
self.run("curl -JOL " + pkgLink) |
<|file_name|>Fractal.cpp<|end_file_name|><|fim▁begin|>#include "Fractal.h"
Color::Color() : r(0.0), g(0.0), b(0.0) {}
Color::Color(double rin, double gin, double bin) : r(rin), g(gin), b(bin) {}
Fractal::Fractal(int width, int height)
: width_(width), height_(height), center_x_(0.0), center_y_(0.0),
max_distance_sqr_(4.0), max_iteration_(32) {
pixel_size_ = 4.0 / width_;
}
Fractal::~Fractal() {}
void Fractal::Initialize() {
RecalcMins();
CreateColors();
CalculateEscapeTime();
}
void Fractal::CreateColors() {
int i;
double r, g, b;
colors_.resize(max_iteration_ + 1);
for (i = 0; i < max_iteration_; i++) {
r = 1.0 * i / (double) max_iteration_;
g = 0.5 * i / (double) max_iteration_;
b = 1.0 * i / (double) max_iteration_;
colors_[i] = Color(r, g, b);
}
colors_[max_iteration_] = Color(0.0, 0.0, 0.0);
}
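// Map each pixel to a point in the complex plane (xmin/ymin is the centre
// of the lower-left pixel) and record its escape iteration count.
// EscapeOne() is assumed to return max_iteration_ for points that never
// escape within the iteration budget.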
void Fractal::CalculateEscapeTime() {
int i, j;
double x, y, xmin, ymin;
xmin = center_x_ - pixel_size_ * (width_ / 2.0 - 0.5);
ymin = center_y_ - pixel_size_ * (height_ / 2.0 - 0.5);
escape_times_.resize(height_);
for (j = 0; j < height_; j++) {
escape_times_[j].resize(width_);
for (i = 0; i < width_; i++) {
x = xmin + i * pixel_size_;
y = ymin + j * pixel_size_;
escape_times_[j][i] = EscapeOne(x, y);
}
}
}
void Fractal::Draw() {
int x, y, iter;
for (y = 0; y < height_; y++) {
for (x = 0; x < width_; x++) {
iter = escape_times_[y][x];
glColor3d(colors_[iter].r, colors_[iter].g, colors_[iter].b);
glBegin(GL_POINTS);
glVertex2d(x, y);
glEnd();
}
}
}
void Fractal::Center(double x, double y) {
RecalcCenter(x, y);
RecalcMins();
CalculateEscapeTime();
}
<|fim▁hole|> CalculateEscapeTime();
}
void Fractal::ZoomOut(double x, double y) {
RecalcCenter(x, y);
pixel_size_ *= 2.0;
RecalcMins();
CalculateEscapeTime();
}
void Fractal::RecalcCenter(double x, double y) {
center_x_ = min_x_ + pixel_size_ * x;
center_y_ = min_y_ + pixel_size_ * y;
}
void Fractal::RecalcMins() {
min_x_ = center_x_ - pixel_size_ * (width_ / 2.0 - 0.5);
min_y_ = center_y_ - pixel_size_ * (height_ / 2.0 - 0.5);
}<|fim▁end|> | void Fractal::ZoomIn(double x, double y) {
RecalcCenter(x, y);
pixel_size_ /= 2.0;
RecalcMins(); |
<|file_name|>svgsvgelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::SVGSVGElementBinding;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use dom::node::Node;
use dom::svggraphicselement::SVGGraphicsElement;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use script_layout_interface::SVGSVGData;
use style::attr::AttrValue;
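// 300x150 is the CSS default object size for replaced elements; an SVG
// root falls back to it when no width/height attributes are present.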
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[dom_struct]
pub struct SVGSVGElement {
svggraphicselement: SVGGraphicsElement
}
impl SVGSVGElement {
fn new_inherited(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> SVGSVGElement {
SVGSVGElement {
svggraphicselement:
SVGGraphicsElement::new_inherited(local_name, prefix, document)
}
}
<|fim▁hole|> Node::reflect_node(box SVGSVGElement::new_inherited(local_name, prefix, document),
document,
SVGSVGElementBinding::Wrap)
}
}
pub trait LayoutSVGSVGElementHelpers {
fn data(&self) -> SVGSVGData;
}
impl LayoutSVGSVGElementHelpers for LayoutJS<SVGSVGElement> {
#[allow(unsafe_code)]
fn data(&self) -> SVGSVGData {
unsafe {
let SVG = &*self.unsafe_get();
let width_attr = SVG.upcast::<Element>().get_attr_for_layout(&ns!(), &local_name!("width"));
let height_attr = SVG.upcast::<Element>().get_attr_for_layout(&ns!(), &local_name!("height"));
SVGSVGData {
width: width_attr.map_or(DEFAULT_WIDTH, |val| val.as_uint()),
height: height_attr.map_or(DEFAULT_HEIGHT, |val| val.as_uint()),
}
}
}
}
impl VirtualMethods for SVGSVGElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<SVGGraphicsElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("width") => AttrValue::from_u32(value.into(), DEFAULT_WIDTH),
&local_name!("height") => AttrValue::from_u32(value.into(), DEFAULT_HEIGHT),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}<|fim▁end|> | #[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> Root<SVGSVGElement> { |
<|file_name|>shareDialog.js<|end_file_name|><|fim▁begin|>const html = require('choo/html');
module.exports = function(name, url) {
const dialog = function(state, emit, close) {
return html`
<send-share-dialog
class="flex flex-col items-center text-center p-4 max-w-sm m-auto"
>
<h1 class="text-3xl font-bold my-4">
${state.translate('notifyUploadEncryptDone')}
</h1>
<p class="font-normal leading-normal text-grey-80 dark:text-grey-40">
${state.translate('shareLinkDescription')}<br />
<span class="word-break-all">${name}</span>
</p>
<input<|fim▁hole|> type="text"
id="share-url"
class="w-full my-4 border rounded-lg leading-loose h-12 px-2 py-1 dark:bg-grey-80"
value="${url}"
readonly="true"
/>
<button
class="btn rounded-lg w-full flex-shrink-0 focus:outline"
onclick="${share}"
title="${state.translate('shareLinkButton')}"
>
${state.translate('shareLinkButton')}
</button>
<button
class="link-blue my-4 font-medium cursor-pointer focus:outline"
onclick="${close}"
title="${state.translate('okButton')}"
>
${state.translate('okButton')}
</button>
</send-share-dialog>
`;
async function share(event) {
event.stopPropagation();
try {
await navigator.share({
title: state.translate('-send-brand'),
text: state.translate('shareMessage', { name }),
url
});
} catch (e) {
if (e.code === e.ABORT_ERR) {
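          // the user dismissed the native share sheet; treat it as a no-op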
return;
}
console.error(e);
}
close();
}
};
dialog.type = 'share';
return dialog;
};<|fim▁end|> | |
<|file_name|>tdAppliance1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: tdAppliance1.py $
"""
VirtualBox Validation Kit - IAppliance Test #1
"""
<|fim▁hole|>This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 105541 $"
# Standard Python imports.
import os
import sys
import tarfile
# Only the main script needs to modify the path.
try: __file__
except: __file__ = sys.argv[0];
g_ksValidationKitDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))));
sys.path.append(g_ksValidationKitDir);
# Validation Kit imports.
from testdriver import reporter;
from testdriver import base;
from testdriver import vbox;
from testdriver import vboxwrappers;
class tdAppliance1(vbox.TestDriver):
"""
IAppliance Test #1.
"""
def __init__(self):
vbox.TestDriver.__init__(self);
self.asRsrcs = None;
#
# Overridden methods.
#
def actionConfig(self):
"""
Import the API.
"""
if not self.importVBoxApi():
return False;
return True;
def actionExecute(self):
"""
Execute the testcase.
"""
fRc = True;
# Import a set of simple OVAs.
# Note! Manifests generated by ovftool 4.0.0 does not include the ovf, while the ones b 4.1.0 does.
for sOva in (
# t1 is a plain VM without any disks, ovftool 4.0 export from fusion
'tdAppliance1-t1.ova',
# t2 is a plain VM with one disk. Both 4.0 and 4.1.0 exports.
'tdAppliance1-t2.ova',
'tdAppliance1-t2-ovftool-4.1.0.ova',
# t3 is a VM with one gzipped disk and selecting SHA256 on the ovftool cmdline (--compress=9 --shaAlgorithm=sha256).
'tdAppliance1-t3.ova',
'tdAppliance1-t3-ovftool-4.1.0.ova',
# t4 is a VM with with two gzipped disk, SHA256 and a (self) signed manifest (--privateKey=./tdAppliance1-t4.pem).
'tdAppliance1-t4.ova',
'tdAppliance1-t4-ovftool-4.1.0.ova',
# t5 is a VM with with one gzipped disk, SHA1 and a manifest signed by a valid (2016) DigiCert code signing certificate.
'tdAppliance1-t5.ova',
'tdAppliance1-t5-ovftool-4.1.0.ova',
# t6 is a VM with with one gzipped disk, SHA1 and a manifest signed by a certificate issued by the t4 certificate,
# thus it should be impossible to establish a trusted path to a root CA.
'tdAppliance1-t6.ova',
'tdAppliance1-t6-ovftool-4.1.0.ova',
):
reporter.testStart(sOva);
try:
fRc = self.testImportOva(os.path.join(g_ksValidationKitDir, 'tests', 'api', sOva)) and fRc;
fRc = self.testImportOvaAsOvf(os.path.join(g_ksValidationKitDir, 'tests', 'api', sOva)) and fRc;
except:
reporter.errorXcpt();
fRc = False;
fRc = reporter.testDone() and fRc;
## @todo more stuff
return fRc;
#
# Test execution helpers.
#
def testImportOva(self, sOva):
""" xxx """
oVirtualBox = self.oVBoxMgr.getVirtualBox();
#
# Import it as OVA.
#
try:
oAppliance = oVirtualBox.createAppliance();
except:
return reporter.errorXcpt('IVirtualBox::createAppliance failed');
print "oAppliance=%s" % (oAppliance,)
try:
oProgress = vboxwrappers.ProgressWrapper(oAppliance.read(sOva), self.oVBoxMgr, self, 'read "%s"' % (sOva,));
except:
return reporter.errorXcpt('IAppliance::read("%s") failed' % (sOva,));
oProgress.wait();
if oProgress.logResult() is False:
return False;
try:
oAppliance.interpret();
except:
return reporter.errorXcpt('IAppliance::interpret() failed on "%s"' % (sOva,));
#
try:
oProgress = vboxwrappers.ProgressWrapper(oAppliance.importMachines([]),
self.oVBoxMgr, self, 'importMachines "%s"' % (sOva,));
except:
return reporter.errorXcpt('IAppliance::importMachines failed on "%s"' % (sOva,));
oProgress.wait();
if oProgress.logResult() is False:
return False;
#
# Export the
#
## @todo do more with this OVA. Like untaring it and loading it as an OVF. Export it and import it again.
return True;
def testImportOvaAsOvf(self, sOva):
"""
Unpacts the OVA into a subdirectory in the scratch area and imports it as an OVF.
"""
oVirtualBox = self.oVBoxMgr.getVirtualBox();
sTmpDir = os.path.join(self.sScratchPath, os.path.split(sOva)[1] + '-ovf');
sOvf = os.path.join(sTmpDir, os.path.splitext(os.path.split(sOva)[1])[0] + '.ovf');
#
# Unpack
#
try:
os.mkdir(sTmpDir, 0755);
oTarFile = tarfile.open(sOva, 'r:*');
oTarFile.extractall(sTmpDir);
oTarFile.close();
except:
            return reporter.errorXcpt('Unpacking "%s" to "%s" for OVF style importing failed' % (sOva, sTmpDir,));
#
# Import.
#
try:
oAppliance2 = oVirtualBox.createAppliance();
except:
return reporter.errorXcpt('IVirtualBox::createAppliance failed (#2)');
print "oAppliance2=%s" % (oAppliance2,)
try:
oProgress = vboxwrappers.ProgressWrapper(oAppliance2.read(sOvf), self.oVBoxMgr, self, 'read "%s"' % (sOvf,));
except:
return reporter.errorXcpt('IAppliance::read("%s") failed' % (sOvf,));
oProgress.wait();
if oProgress.logResult() is False:
return False;
try:
oAppliance2.interpret();
except:
return reporter.errorXcpt('IAppliance::interpret() failed on "%s"' % (sOvf,));
try:
oProgress = vboxwrappers.ProgressWrapper(oAppliance2.importMachines([]),
self.oVBoxMgr, self, 'importMachines "%s"' % (sOvf,));
except:
return reporter.errorXcpt('IAppliance::importMachines failed on "%s"' % (sOvf,));
oProgress.wait();
if oProgress.logResult() is False:
return False;
return True;
if __name__ == '__main__':
sys.exit(tdAppliance1().main(sys.argv));<|fim▁end|> | __copyright__ = \
"""
Copyright (C) 2010-2015 Oracle Corporation
|
<|file_name|>privacy.rs<|end_file_name|><|fim▁begin|>// Test that we do not get a privacy error here. Initially, we did,
// because we inferred an outlives predciate of `<Foo<'a> as
// Private>::Out: 'a`, but the private trait is -- well -- private,
// and hence it was not something that a pub trait could refer to.
//
// run-pass<|fim▁hole|>pub struct Foo<'a> {
field: Option<&'a <Foo<'a> as Private>::Out>
}
trait Private {
type Out: ?Sized;
}
impl<T: ?Sized> Private for T { type Out = Self; }
fn main() { }<|fim▁end|> |
#![allow(dead_code)]
|
<|file_name|>graph_builder_apply.go<|end_file_name|><|fim▁begin|>package terraform
import (
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/dag"
"github.com/hashicorp/terraform/plans"
"github.com/hashicorp/terraform/states"
"github.com/hashicorp/terraform/tfdiags"
)
// ApplyGraphBuilder implements GraphBuilder and is responsible for building
// a graph for applying a Terraform diff.
//
// Because the graph is built from the diff (vs. the config or state),
// this helps ensure that the apply-time graph doesn't modify any resources
// that aren't explicitly in the diff. There are other scenarios where the
// apply can deviate from the diff, so this is just one layer of protection.
type ApplyGraphBuilder struct {
// Config is the configuration tree that the diff was built from.
Config *configs.Config
	// Changes describes the changes that we need to apply.
Changes *plans.Changes
// State is the current state
State *states.State
// Components is a factory for the plug-in components (providers and
// provisioners) available for use.
Components contextComponentFactory
// Schemas is the repository of schemas we will draw from to analyse
// the configuration.
Schemas *Schemas
// Targets are resources to target. This is only required to make sure
// unnecessary outputs aren't included in the apply graph. The plan
// builder successfully handles targeting resources. In the future,
// outputs should go into the diff so that this is unnecessary.
Targets []addrs.Targetable
// DisableReduce, if true, will not reduce the graph. Great for testing.
DisableReduce bool
// Destroy, if true, represents a pure destroy operation
Destroy bool
// Validate will do structural validation of the graph.
Validate bool
}
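// A minimal usage sketch (the field values below are illustrative, not
// prescriptive):
//
//	b := &ApplyGraphBuilder{
//		Config:   config,
//		Changes:  plan.Changes,
//		State:    state,
//		Validate: true,
//	}
//	graph, diags := b.Build(addrs.RootModuleInstance)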
// See GraphBuilder
func (b *ApplyGraphBuilder) Build(path addrs.ModuleInstance) (*Graph, tfdiags.Diagnostics) {
return (&BasicGraphBuilder{
Steps: b.Steps(),
Validate: b.Validate,
Name: "ApplyGraphBuilder",
}).Build(path)
}
// See GraphBuilder
func (b *ApplyGraphBuilder) Steps() []GraphTransformer {
// Custom factory for creating providers.
concreteProvider := func(a *NodeAbstractProvider) dag.Vertex {
return &NodeApplyableProvider{
NodeAbstractProvider: a,
}
}
concreteResource := func(a *NodeAbstractResource) dag.Vertex {
return &nodeExpandApplyableResource{
NodeAbstractResource: a,
}
}
concreteResourceInstance := func(a *NodeAbstractResourceInstance) dag.Vertex {
return &NodeApplyableResourceInstance{
NodeAbstractResourceInstance: a,
}
}
steps := []GraphTransformer{
// Creates all the resources represented in the config. During apply,
// we use this just to ensure that the whole-resource metadata is
// updated to reflect things such as whether the count argument is
// set in config, or which provider configuration manages each resource.
&ConfigTransformer{
Concrete: concreteResource,
Config: b.Config,
},
// Creates all the resource instances represented in the diff, along
// with dependency edges against the whole-resource nodes added by
// ConfigTransformer above.
&DiffTransformer{
Concrete: concreteResourceInstance,
State: b.State,
Changes: b.Changes,
},
// Create orphan output nodes
&OrphanOutputTransformer{Config: b.Config, State: b.State},
// Attach the configuration to any resources
&AttachResourceConfigTransformer{Config: b.Config},
// Attach the state
&AttachStateTransformer{State: b.State},
// Provisioner-related transformations
&MissingProvisionerTransformer{Provisioners: b.Components.ResourceProvisioners()},
&ProvisionerTransformer{},
// Add root variables
&RootVariableTransformer{Config: b.Config},
// Add the local values
&LocalTransformer{Config: b.Config},
// Add the outputs
&OutputTransformer{Config: b.Config},
// Add module variables
&ModuleVariableTransformer{Config: b.Config},
// add providers
TransformProviders(b.Components.ResourceProviders(), concreteProvider, b.Config),
// Remove modules no longer present in the config
&RemovedModuleTransformer{Config: b.Config, State: b.State},
// Must attach schemas before ReferenceTransformer so that we can
// analyze the configuration to find references.
&AttachSchemaTransformer{Schemas: b.Schemas, Config: b.Config},
// Create expansion nodes for all of the module calls. This must
// come after all other transformers that create nodes representing
// objects that can belong to modules.
&ModuleExpansionTransformer{Config: b.Config},
// Connect references so ordering is correct
&ReferenceTransformer{},
&AttachDependenciesTransformer{},
// Destruction ordering
&DestroyEdgeTransformer{
Config: b.Config,
State: b.State,
Schemas: b.Schemas,
},
&CBDEdgeTransformer{
Config: b.Config,
State: b.State,
Schemas: b.Schemas,
},
// Create a destroy node for outputs to remove them from the state.
&DestroyOutputTransformer{Destroy: b.Destroy},
// Prune unreferenced values, which may have interpolations that can't
// be resolved.
&PruneUnusedValuesTransformer{
Destroy: b.Destroy,
},
// Add the node to fix the state count boundaries
&CountBoundaryTransformer{
Config: b.Config,
},
// Target
&TargetsTransformer{Targets: b.Targets},
// Close opened plugin connections
&CloseProviderTransformer{},
&CloseProvisionerTransformer{},
// close the root module
&CloseRootModuleTransformer{},<|fim▁hole|> if !b.DisableReduce {
// Perform the transitive reduction to make our graph a bit
// more sane if possible (it usually is possible).
steps = append(steps, &TransitiveReductionTransformer{})
}
return steps
}<|fim▁end|> | }
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Dependencies
const queryValidation = require('./libs/query-validation.js');
const utils = require('./libs/utilities');
/**
* Create an instance of Behance
* @param {string} token - authentication for Behance API
* @public
*/
const Behance = function behance(token) {
this.clientId = token;
// Throw an error if Auth Key is not specified
if (this.clientId === undefined) {
throw Error('Please supply an authorization token for new Behance().');
}
};
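// Example usage (the token is a placeholder, not a real key):
//   const api = new Behance('your-api-token');
//   api.projects({ q: 'illustration' }, (err, res) => { /* ... */ });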
// Endpoints that only support Options
const endpointWithOptionOnly = [{
name: 'projects',
path: 'projects',
queries: queryValidation.projects
}, {
name: 'creativesToFollow',
path: 'creativestofollow',
queries: queryValidation.creativesToFollow
}, {
name: 'users',
path: 'users',
queries: queryValidation.users
}, {
name: 'teams',
path: 'teams',
queries: queryValidation.teams
}, {
name: 'collections',
path: 'collections',
queries: queryValidation.collections
}];
endpointWithOptionOnly.forEach(function iterate(def) {
/**<|fim▁hole|> * @param {function} cb - callback
* @return {object} - response from Behance API
* @public
*/
Behance.prototype[def.name] = function assign(opts, cb) {
if (Object.keys(opts).length === 0 || utils.compareKeys(opts, def.queries, def.name)) {
utils.requestHandler(utils.requestUrl(def.path, this.clientId, opts), cb);
}
};
});
/**
* Endpoints that require an ID with no Options
*/
const endpointWithOnlyAnId = [{
name: 'project',
pathprefix: 'projects/'
}, {
name: 'user',
pathprefix: 'users/'
}, {
name: 'team',
pathprefix: 'teams/'
}, {
name: 'userStats',
pathprefix: 'users/',
pathsuffix: '/stats'
}, {
name: 'userWorkExperience',
pathprefix: 'users/',
pathsuffix: '/work_experience'
}, {
name: 'collection',
path: 'collection'
}];
endpointWithOnlyAnId.forEach(function iterate(def) {
/**
* Get info about a project/user/collection
* @param {string} id - identifier
* @param {function} cb - callback
* @return {object} - response from Behance API
* @public
*/
Behance.prototype[def.name] = function assign(id, cb) {
const endpoint = def.pathprefix + id + (def.pathsuffix ? def.pathsuffix : '');
if (arguments.length !== 2) {
throw Error(`.${def.name} requires both an id and a callback function.`);
}
utils.requestHandler(utils.requestUrl(endpoint, this.clientId), cb);
};
});
/**
* Endpoints that require an ID and support Options
*/
const endpointWithIdAndOptions = [{
name: 'projectComments',
pathprefix: 'projects/',
pathsuffix: '/comments',
queries: queryValidation.projectComments
}, {
name: 'userProjects',
pathprefix: 'users/',
pathsuffix: '/projects',
queries: queryValidation.userProjects
}, {
name: 'userWips',
pathprefix: 'users/',
pathsuffix: '/wips',
queries: queryValidation.userWips
}, {
name: 'teamProjects',
pathprefix: 'teams/',
pathsuffix: '/projects',
queries: queryValidation.teamsProjects
}, {
name: 'userAppreciations',
pathprefix: 'users/',
pathsuffix: '/appreciations',
queries: queryValidation.userAppreciations
}, {
name: 'userCollections',
pathprefix: 'users/',
pathsuffix: '/collections',
queries: queryValidation.userCollections
}, {
name: 'userFollowers',
pathprefix: 'users/',
pathsuffix: '/followers',
queries: queryValidation.userFollowers
}, {
name: 'userFollowing',
pathprefix: 'users/',
pathsuffix: '/following',
queries: queryValidation.userFollowing
}, {
name: 'collectionProjects',
pathprefix: 'collections/',
pathsuffix: '/projects',
queries: queryValidation.collectionProjects
}];
endpointWithIdAndOptions.forEach(function iterate(def) {
/**
* Get a list of comments/projects/wips/appreciations/collections/followers
* @param {string} id - identifier
* @param {object} opts - queries
* @param {function} cb - callback
* @return {object} - response from Behance API
* @public
*/
Behance.prototype[def.name] = function assign(id, opts, cb) {
const endpoint = def.pathprefix + id + (def.pathsuffix ? def.pathsuffix : '');
let newCb;
let newOpts;
// Update Params order if options aren't supplied
if (arguments.length === 2) {
newCb = opts;
newOpts = {};
}
if (id === '' || typeof id === 'object') {
throw Error(`.${def.name} requires at least an id and a callback function.`);
}
if (Object.keys(opts).length === 0 || utils.compareKeys(opts, def.queries, def.name)) {
utils.requestHandler(utils.requestUrl(endpoint, this.clientId, newOpts || opts), newCb || cb);
}
};
});
/**
* Get Creative Fields
* @param {function} cb - callback
* @return {object} - response from Behance API
* @public
*/
Behance.prototype.fields = function assign(cb) {
utils.requestHandler(utils.requestUrl('fields', this.clientId), cb);
};
module.exports = Behance;<|fim▁end|> | * Get a list of projects/creatives/users/collections
* @param {object} opts - queries |
<|file_name|>AliPMDHitsRead.C<|end_file_name|><|fim▁begin|>//
// This macro reads the Hits Tree
//
void AliPMDHitsRead(Int_t nevt = 1)
{
TStopwatch timer;
timer.Start();
TH2F *h2 = new TH2F("h2"," Y vs. X",200,-100.,100.,200,-100.,100.);
// FILE *fpw = fopen("alipmdhits.dat","w");
AliRunLoader *fRunLoader = AliRunLoader::Open("galice.root");
if (!fRunLoader)
{
printf("Can not open session for file ");
}
if (!fRunLoader->GetAliRun()) fRunLoader->LoadgAlice();
if (!fRunLoader->TreeE()) fRunLoader->LoadHeader();
if (!fRunLoader->TreeK()) fRunLoader->LoadKinematics();
gAlice = fRunLoader->GetAliRun();
if (gAlice)
{
printf("Alirun object found\n");
}
else
{
printf("Could not found Alirun object\n");
}
fPMD = (AliPMD*)gAlice->GetDetector("PMD");
fPMDLoader = fRunLoader->GetLoader("PMDLoader");
if (fPMDLoader == 0x0)
{
printf("Can not find PMDLoader\n");
}
fPMDLoader->LoadHits("READ");
// This reads the PMD Hits tree and assigns the right track number
// to a cell and stores in the summable digits tree
//
const Int_t kPi0 = 111;
const Int_t kGamma = 22;
Int_t npmd;
Int_t trackno;
Int_t smnumber;
Int_t trackpid;
Int_t mtrackno;
Int_t mtrackpid;
Int_t xpad = -1, ypad = -1;
Float_t edep;
Float_t vx = -999.0, vy = -999.0, vz = -999.0;
Float_t xPos, yPos, zPos;
Float_t xx, yy;
AliPMDUtility cc;
for (Int_t ievt = 0; ievt < nevt; ievt++)
{
printf("Event Number = %d\n",ievt);
Int_t nparticles = fRunLoader->GetHeader()->GetNtrack();
printf("Number of Particles = %d\n",nparticles);
fRunLoader->GetEvent(ievt);
// ------------------------------------------------------- //
// Pointer to specific detector hits.
// Get pointers to Alice detectors and Hits containers
TTree* treeH = fPMDLoader->TreeH();
Int_t ntracks = (Int_t) treeH->GetEntries();
printf("Number of Tracks in the TreeH = %d\n", ntracks);
TClonesArray* hits = 0;
if (fPMD) hits = fPMD->Hits();
// Start loop on tracks in the hits containers
for (Int_t track=0; track<ntracks;track++)
{
gAlice->GetMCApp()->ResetHits();
treeH->GetEvent(track);
if (fPMD)
{
npmd = hits->GetEntriesFast();
for (int ipmd = 0; ipmd < npmd; ipmd++)
{
fPMDHit = (AliPMDhit*) hits->UncheckedAt(ipmd);
trackno = fPMDHit->GetTrack();
//fprintf(fpw,"trackno = %d\n",trackno);
// get kinematics of the particles
TParticle* mparticle = gAlice->GetMCApp()->Particle(trackno);
trackpid = mparticle->GetPdgCode();
Int_t igatr = -999;
Int_t ichtr = -999;
Int_t igapid = -999;
Int_t imo;
Int_t igen = 0;
Int_t idmo = -999;
Int_t tracknoOld=0, trackpidOld=0, statusOld = 0;
if (mparticle->GetFirstMother() == -1)
{
tracknoOld = trackno;
trackpidOld = trackpid;
statusOld = -1;
vx = mparticle->Vx();
vy = mparticle->Vy();
vz = mparticle->Vz();
//fprintf(fpw,"==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, -1, trackpid, vx, vy, vz);
}
Int_t igstatus = 0;
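          // Walk up the ancestry: remember the last photon or e+/e- ancestor
          // produced at the event vertex as the generator track, falling back
          // to a primary pi0 (or the top-most mother) when no such ancestor
          // exists.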
while((imo = mparticle->GetFirstMother()) >= 0)
{
igen++;
mparticle = gAlice->GetMCApp()->Particle(imo);
idmo = mparticle->GetPdgCode();
vx = mparticle->Vx();
vy = mparticle->Vy();
vz = mparticle->Vz();
//printf("==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, imo, idmo, vx, vy, vz);
//fprintf(fpw,"==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, imo, idmo, vx, vy, vz);
<|fim▁hole|> {
igatr = imo;
igapid = idmo;
igstatus = 1;
}
if(igstatus == 0)
{
if (idmo == kPi0 && vx == 0. && vy == 0. && vz == 0.)
{
igatr = imo;
igapid = idmo;
}
}
ichtr = imo;
} // end of while loop
if (idmo == kPi0 && vx == 0. && vy == 0. && vz == 0.)
{
mtrackno = igatr;
mtrackpid = igapid;
}
else
{
mtrackno = ichtr;
mtrackpid = idmo;
}
if (statusOld == -1)
{
mtrackno = tracknoOld;
mtrackpid = trackpidOld;
}
//printf("mtrackno = %d mtrackpid = %d\n",mtrackno,mtrackpid);
xPos = fPMDHit->X();
yPos = fPMDHit->Y();
zPos = fPMDHit->Z();
Float_t time = fPMDHit->GetTime();
printf("++++++++++ time = %f\n",time);
edep = fPMDHit->GetEnergy();
Int_t vol1 = fPMDHit->GetVolume(1); // Column
Int_t vol2 = fPMDHit->GetVolume(2); // Row
Int_t vol3 = fPMDHit->GetVolume(4); // UnitModule
// -----------------------------------------//
// For Super Module 1 & 2 //
// nrow = 96, ncol = 48 //
// For Super Module 3 & 4 //
// nrow = 48, ncol = 96 //
// -----------------------------------------//
if (vol3 < 24)
{
smnumber = vol3;
}
else
{
smnumber = vol3 - 24;
}
xpad = vol1;
ypad = vol2;
if(zPos > 361.5)
{
cc.RectGeomCellPos(smnumber,xpad,ypad,xx,yy);
h2->Fill(xx,yy);
}
}
}
} // Track Loop ended
}
h2->Draw();
fRunLoader->UnloadgAlice();
fRunLoader->UnloadHeader();
fRunLoader->UnloadKinematics();
fPMDLoader->UnloadHits();
timer.Stop();
timer.Print();
}<|fim▁end|> | if ((idmo == kGamma || idmo == -11 || idmo == 11) && vx == 0. && vy == 0. && vz == 0.) |
<|file_name|>rich_transport.go<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package thrift
import "io"
type RichTransport struct {
TTransport
}
// Wraps Transport to provide TRichTransport interface
func NewTRichTransport(trans TTransport) *RichTransport {
return &RichTransport{trans}
}
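// Illustrative usage (any TTransport will do; a TMemoryBuffer from this
// package is assumed here):
//
//	rt := NewTRichTransport(NewTMemoryBuffer())
//	_ = rt.WriteByte('x')
//	b, _ := rt.ReadByte() // b == 'x'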
func (r *RichTransport) ReadByte() (c byte, err error) {
return readByte(r.TTransport)
}
func (r *RichTransport) WriteByte(c byte) error {
return writeByte(r.TTransport, c)
}
func (r *RichTransport) WriteString(s string) (n int, err error) {
return r.Write([]byte(s))
}
func (r *RichTransport) RemainingBytes() (num_bytes uint64) {
return r.TTransport.RemainingBytes()
}
func readByte(r io.Reader) (c byte, err error) {
v := [1]byte{0}
n, err := r.Read(v[0:1])
if n > 0 && (err == nil || err == io.EOF) {
return v[0], nil
}
if n > 0 && err != nil {
return v[0], err
}
if err != nil {
return 0, err
}
return v[0], nil
}
func writeByte(w io.Writer, c byte) error {
v := [1]byte{c}
_, err := w.Write(v[0:1])
return err
}<|fim▁end|> | * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
from setuptools import setup
from subprocess import call
from sys import platform, argv
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
SCRIPTS = ["src/bg_daemon/background_daemon.py"]
# only compile quack when none of these options are chosen
if (all([e not in argv for e in ['egg_info', 'sdist', 'register']]) and
platform == 'darwin'):
try:
call(['make', '-C', 'src/bg_daemon/'])
SCRIPTS.append("src/bg_daemon/quack")
except OSError as e:
print "Can't compile quack, reason {}".format(str(e))
setup(
name="bg_daemon",
version="0.0.1",
author="Santiago Torres",
author_email="[email protected]",
description=("An extensible set of classes that can programmatically "
"update the desktop wallpaper"),
license="GPLv2",
keywords="imgur desktop wallpaper background",
url="https://github.com/santiagotorres/bg_daemon",
packages=["bg_daemon", "bg_daemon.fetchers"],
package_dir={"bg_daemon": "src/bg_daemon",
"bg_daemon.fetchers": "src/bg_daemon/fetchers"},
scripts=SCRIPTS,
include_package_data=True,
data_files=[('bg_daemon', ['src/bg_daemon/settings.json',
'src/bg_daemon/mac-update.sh'])],
long_description=read("README.md"),
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Topic :: Utilities",
"License :: ",
"Environment :: No Input/Output (Daemon)",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"Operating System :: Unix",
"Topic :: Multimedia",
],
install_requires=[
"imgurpython",
"requests",<|fim▁hole|> ],
)<|fim▁end|> | "python-crontab",
"mock", |
<|file_name|>cftime_offsets.py<|end_file_name|><|fim▁begin|>"""Time offset classes for use with cftime.datetime objects"""
# The offset classes and mechanisms for generating time ranges defined in
# this module were copied/adapted from those defined in pandas. See in
# particular the objects and methods defined in pandas.tseries.offsets
# and pandas.core.indexes.datetimes.
# For reference, here is a copy of the pandas copyright notice:
# (c) 2011-2012, Lambda Foundry, Inc. and PyData Development Team
# All rights reserved.
# Copyright (c) 2008-2011 AQR Capital Management, LLC
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the copyright holder nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
from datetime import timedelta
from distutils.version import LooseVersion
from functools import partial
from typing import ClassVar, Optional
import numpy as np
from ..core.pdcompat import count_not_none
from .cftimeindex import CFTimeIndex, _parse_iso8601_with_reso
from .times import format_cftime_datetime
def get_date_type(calendar):
"""Return the cftime date type for a given calendar name."""
try:
import cftime
except ImportError:
raise ImportError("cftime is required for dates with non-standard calendars")
else:
calendars = {
"noleap": cftime.DatetimeNoLeap,
"360_day": cftime.Datetime360Day,
"365_day": cftime.DatetimeNoLeap,
"366_day": cftime.DatetimeAllLeap,
"gregorian": cftime.DatetimeGregorian,
"proleptic_gregorian": cftime.DatetimeProlepticGregorian,
"julian": cftime.DatetimeJulian,
"all_leap": cftime.DatetimeAllLeap,
"standard": cftime.DatetimeGregorian,
}
return calendars[calendar]
class BaseCFTimeOffset:
_freq: ClassVar[Optional[str]] = None
_day_option: ClassVar[Optional[str]] = None
def __init__(self, n=1):
if not isinstance(n, int):
raise TypeError(
"The provided multiple 'n' must be an integer. "
"Instead a value of type {!r} was provided.".format(type(n))
)
self.n = n
def rule_code(self):
return self._freq
def __eq__(self, other):
return self.n == other.n and self.rule_code() == other.rule_code()
def __ne__(self, other):
return not self == other
def __add__(self, other):
return self.__apply__(other)
def __sub__(self, other):
import cftime
if isinstance(other, cftime.datetime):
raise TypeError("Cannot subtract a cftime.datetime " "from a time offset.")
elif type(other) == type(self):
return type(self)(self.n - other.n)
else:
return NotImplemented
def __mul__(self, other):
return type(self)(n=other * self.n)
def __neg__(self):
return self * -1
def __rmul__(self, other):
return self.__mul__(other)
def __radd__(self, other):
return self.__add__(other)
def __rsub__(self, other):
if isinstance(other, BaseCFTimeOffset) and type(self) != type(other):
raise TypeError("Cannot subtract cftime offsets of differing " "types")
return -self + other
def __apply__(self):
return NotImplemented
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
test_date = (self + date) - self
return date == test_date
def rollforward(self, date):
if self.onOffset(date):
return date
else:
return date + type(self)()
def rollback(self, date):
if self.onOffset(date):
return date
else:
return date - type(self)()
def __str__(self):
return "<{}: n={}>".format(type(self).__name__, self.n)
def __repr__(self):
return str(self)
def _get_offset_day(self, other):
# subclass must implement `_day_option`; calling from the base class
# will raise NotImplementedError.
return _get_day_of_month(other, self._day_option)
def _get_day_of_month(other, day_option):
"""Find the day in `other`'s month that satisfies a BaseCFTimeOffset's
onOffset policy, as described by the `day_option` argument.
Parameters
----------
other : cftime.datetime
day_option : 'start', 'end'
'start': returns 1
'end': returns last day of the month
Returns
-------
day_of_month : int
"""
if day_option == "start":
return 1
elif day_option == "end":
days_in_month = _days_in_month(other)
return days_in_month
elif day_option is None:
# Note: unlike `_shift_month`, _get_day_of_month does not
# allow day_option = None
raise NotImplementedError()
else:
raise ValueError(day_option)
def _days_in_month(date):
"""The number of days in the month of the given date"""
if date.month == 12:
reference = type(date)(date.year + 1, 1, 1)
else:
reference = type(date)(date.year, date.month + 1, 1)
return (reference - timedelta(days=1)).day
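# Illustrative: _days_in_month(cftime.Datetime360Day(2000, 2, 1)) == 30 on
# the 360_day calendar, while the Gregorian February 2000 has 29 days.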
def _adjust_n_months(other_day, n, reference_day):
"""Adjust the number of times a monthly offset is applied based
on the day of a given date, and the reference day provided.
"""
if n > 0 and other_day < reference_day:
n = n - 1
elif n <= 0 and other_day > reference_day:
n = n + 1
return n
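# Illustrative: applying MonthEnd(1) to a date on day 15 compares
# other_day=15 with reference_day=31, so n is reduced to 0 and the
# subsequent shift stays inside the current month (landing on its last day).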
def _adjust_n_years(other, n, month, reference_day):
"""Adjust the number of times an annual offset is applied based on
another date, and the reference day provided"""
if n > 0:
if other.month < month or (other.month == month and other.day < reference_day):
n -= 1
else:
if other.month > month or (other.month == month and other.day > reference_day):
n += 1
return n
def _shift_month(date, months, day_option="start"):
"""Shift the date to a month start or end a given number of months away.
"""
import cftime
delta_year = (date.month + months) // 12
month = (date.month + months) % 12
if month == 0:
month = 12
delta_year = delta_year - 1
year = date.year + delta_year
if day_option == "start":
day = 1
elif day_option == "end":
reference = type(date)(year, month, 1)
day = _days_in_month(reference)
else:
raise ValueError(day_option)
if LooseVersion(cftime.__version__) < LooseVersion("1.0.4"):
# dayofwk=-1 is required to update the dayofwk and dayofyr attributes of
# the returned date object in versions of cftime between 1.0.2 and
# 1.0.3.4. It can be removed for versions of cftime greater than
# 1.0.3.4.
return date.replace(year=year, month=month, day=day, dayofwk=-1)
else:
return date.replace(year=year, month=month, day=day)
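# Illustrative: _shift_month(cftime.Datetime360Day(2000, 11, 5), 3, "start")
# crosses the year boundary and returns Datetime360Day(2001, 2, 1).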
def roll_qtrday(other, n, month, day_option, modby=3):
"""Possibly increment or decrement the number of periods to shift
based on rollforward/rollbackward conventions.
Parameters
----------
other : cftime.datetime
n : number of periods to increment, before adjusting for rolling
month : int reference month giving the first month of the year
day_option : 'start', 'end'
The convention to use in finding the day in a given month against
which to compare for rollforward/rollbackward decisions.
modby : int 3 for quarters, 12 for years
Returns
-------
n : int number of periods to increment
See Also
--------
_get_day_of_month : Find the day in a month provided an offset.
"""
months_since = other.month % modby - month % modby
if n > 0:
if months_since < 0 or (
months_since == 0 and other.day < _get_day_of_month(other, day_option)
):
# pretend to roll back if on same month but
# before compare_day
n -= 1
else:
if months_since > 0 or (
months_since == 0 and other.day > _get_day_of_month(other, day_option)
):
# make sure to roll forward, so negate
n += 1
return n
def _validate_month(month, default_month):
if month is None:
result_month = default_month
else:
result_month = month
if not isinstance(result_month, int):
raise TypeError(
"'self.month' must be an integer value between 1 "
"and 12. Instead, it was set to a value of "
"{!r}".format(result_month)
)
elif not (1 <= result_month <= 12):
raise ValueError(
"'self.month' must be an integer value between 1 "
"and 12. Instead, it was set to a value of "
"{!r}".format(result_month)
)
return result_month
class MonthBegin(BaseCFTimeOffset):
_freq = "MS"
def __apply__(self, other):
n = _adjust_n_months(other.day, self.n, 1)
return _shift_month(other, n, "start")
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
return date.day == 1
class MonthEnd(BaseCFTimeOffset):
_freq = "M"
def __apply__(self, other):
n = _adjust_n_months(other.day, self.n, _days_in_month(other))
return _shift_month(other, n, "end")
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
return date.day == _days_in_month(date)
_MONTH_ABBREVIATIONS = {
1: "JAN",
2: "FEB",
3: "MAR",
4: "APR",
5: "MAY",
6: "JUN",
7: "JUL",
8: "AUG",
9: "SEP",
10: "OCT",
11: "NOV",
12: "DEC",
}
class QuarterOffset(BaseCFTimeOffset):
"""Quarter representation copied off of pandas/tseries/offsets.py
"""
_freq: ClassVar[str]
_default_month: ClassVar[int]
def __init__(self, n=1, month=None):
BaseCFTimeOffset.__init__(self, n)
self.month = _validate_month(month, self._default_month)
def __apply__(self, other):
# months_since: find the calendar quarter containing other.month,
# e.g. if other.month == 8, the calendar quarter is [Jul, Aug, Sep].
# Then find the month in that quarter containing an onOffset date for
# self. `months_since` is the number of months to shift other.month
# to get to this on-offset month.
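        # Illustrative: with other.month == 8 and self.month == 1 (a
        # JAN-anchored quarter), months_since = 8 % 3 - 1 % 3 = 1, i.e.
        # August sits one month past July, the on-offset month of its quarter.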
months_since = other.month % 3 - self.month % 3
qtrs = roll_qtrday(
other, self.n, self.month, day_option=self._day_option, modby=3
)
months = qtrs * 3 - months_since
return _shift_month(other, months, self._day_option)
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
mod_month = (date.month - self.month) % 3
return mod_month == 0 and date.day == self._get_offset_day(date)
def __sub__(self, other):
import cftime
if isinstance(other, cftime.datetime):
raise TypeError("Cannot subtract cftime.datetime from offset.")
elif type(other) == type(self) and other.month == self.month:
return type(self)(self.n - other.n, month=self.month)
else:
return NotImplemented
def __mul__(self, other):
return type(self)(n=other * self.n, month=self.month)
def rule_code(self):
return "{}-{}".format(self._freq, _MONTH_ABBREVIATIONS[self.month])
def __str__(self):
return "<{}: n={}, month={}>".format(type(self).__name__, self.n, self.month)
class QuarterBegin(QuarterOffset):
# When converting a string to an offset, pandas converts
# 'QS' to a QuarterBegin offset starting in the month of
# January. When creating a QuarterBegin offset directly
# from the constructor, however, the default month is March.<|fim▁hole|> _freq = "QS"
_day_option = "start"
def rollforward(self, date):
"""Roll date forward to nearest start of quarter"""
if self.onOffset(date):
return date
else:
return date + QuarterBegin(month=self.month)
def rollback(self, date):
"""Roll date backward to nearest start of quarter"""
if self.onOffset(date):
return date
else:
return date - QuarterBegin(month=self.month)
class QuarterEnd(QuarterOffset):
# When converting a string to an offset, pandas converts
# 'Q' to a QuarterEnd offset starting in the month of
# December. When creating a QuarterEnd offset directly
# from the constructor, however, the default month is March.
# We follow that behavior here.
_default_month = 3
_freq = "Q"
_day_option = "end"
def rollforward(self, date):
"""Roll date forward to nearest end of quarter"""
if self.onOffset(date):
return date
else:
return date + QuarterEnd(month=self.month)
def rollback(self, date):
"""Roll date backward to nearest end of quarter"""
if self.onOffset(date):
return date
else:
return date - QuarterEnd(month=self.month)
class YearOffset(BaseCFTimeOffset):
_freq: ClassVar[str]
_day_option: ClassVar[str]
_default_month: ClassVar[int]
def __init__(self, n=1, month=None):
BaseCFTimeOffset.__init__(self, n)
self.month = _validate_month(month, self._default_month)
def __apply__(self, other):
reference_day = _get_day_of_month(other, self._day_option)
years = _adjust_n_years(other, self.n, self.month, reference_day)
months = years * 12 + (self.month - other.month)
return _shift_month(other, months, self._day_option)
def __sub__(self, other):
import cftime
if isinstance(other, cftime.datetime):
raise TypeError("Cannot subtract cftime.datetime from offset.")
elif type(other) == type(self) and other.month == self.month:
return type(self)(self.n - other.n, month=self.month)
else:
return NotImplemented
def __mul__(self, other):
return type(self)(n=other * self.n, month=self.month)
def rule_code(self):
return "{}-{}".format(self._freq, _MONTH_ABBREVIATIONS[self.month])
def __str__(self):
return "<{}: n={}, month={}>".format(type(self).__name__, self.n, self.month)
class YearBegin(YearOffset):
_freq = "AS"
_day_option = "start"
_default_month = 1
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
return date.day == 1 and date.month == self.month
def rollforward(self, date):
"""Roll date forward to nearest start of year"""
if self.onOffset(date):
return date
else:
return date + YearBegin(month=self.month)
def rollback(self, date):
"""Roll date backward to nearest start of year"""
if self.onOffset(date):
return date
else:
return date - YearBegin(month=self.month)
class YearEnd(YearOffset):
_freq = "A"
_day_option = "end"
_default_month = 12
def onOffset(self, date):
"""Check if the given date is in the set of possible dates created
using a length-one version of this offset class."""
return date.day == _days_in_month(date) and date.month == self.month
def rollforward(self, date):
"""Roll date forward to nearest end of year"""
if self.onOffset(date):
return date
else:
return date + YearEnd(month=self.month)
def rollback(self, date):
"""Roll date backward to nearest end of year"""
if self.onOffset(date):
return date
else:
return date - YearEnd(month=self.month)
class Day(BaseCFTimeOffset):
_freq = "D"
def as_timedelta(self):
return timedelta(days=self.n)
def __apply__(self, other):
return other + self.as_timedelta()
class Hour(BaseCFTimeOffset):
_freq = "H"
def as_timedelta(self):
return timedelta(hours=self.n)
def __apply__(self, other):
return other + self.as_timedelta()
class Minute(BaseCFTimeOffset):
_freq = "T"
def as_timedelta(self):
return timedelta(minutes=self.n)
def __apply__(self, other):
return other + self.as_timedelta()
class Second(BaseCFTimeOffset):
_freq = "S"
def as_timedelta(self):
return timedelta(seconds=self.n)
def __apply__(self, other):
return other + self.as_timedelta()
_FREQUENCIES = {
"A": YearEnd,
"AS": YearBegin,
"Y": YearEnd,
"YS": YearBegin,
"Q": partial(QuarterEnd, month=12),
"QS": partial(QuarterBegin, month=1),
"M": MonthEnd,
"MS": MonthBegin,
"D": Day,
"H": Hour,
"T": Minute,
"min": Minute,
"S": Second,
"AS-JAN": partial(YearBegin, month=1),
"AS-FEB": partial(YearBegin, month=2),
"AS-MAR": partial(YearBegin, month=3),
"AS-APR": partial(YearBegin, month=4),
"AS-MAY": partial(YearBegin, month=5),
"AS-JUN": partial(YearBegin, month=6),
"AS-JUL": partial(YearBegin, month=7),
"AS-AUG": partial(YearBegin, month=8),
"AS-SEP": partial(YearBegin, month=9),
"AS-OCT": partial(YearBegin, month=10),
"AS-NOV": partial(YearBegin, month=11),
"AS-DEC": partial(YearBegin, month=12),
"A-JAN": partial(YearEnd, month=1),
"A-FEB": partial(YearEnd, month=2),
"A-MAR": partial(YearEnd, month=3),
"A-APR": partial(YearEnd, month=4),
"A-MAY": partial(YearEnd, month=5),
"A-JUN": partial(YearEnd, month=6),
"A-JUL": partial(YearEnd, month=7),
"A-AUG": partial(YearEnd, month=8),
"A-SEP": partial(YearEnd, month=9),
"A-OCT": partial(YearEnd, month=10),
"A-NOV": partial(YearEnd, month=11),
"A-DEC": partial(YearEnd, month=12),
"QS-JAN": partial(QuarterBegin, month=1),
"QS-FEB": partial(QuarterBegin, month=2),
"QS-MAR": partial(QuarterBegin, month=3),
"QS-APR": partial(QuarterBegin, month=4),
"QS-MAY": partial(QuarterBegin, month=5),
"QS-JUN": partial(QuarterBegin, month=6),
"QS-JUL": partial(QuarterBegin, month=7),
"QS-AUG": partial(QuarterBegin, month=8),
"QS-SEP": partial(QuarterBegin, month=9),
"QS-OCT": partial(QuarterBegin, month=10),
"QS-NOV": partial(QuarterBegin, month=11),
"QS-DEC": partial(QuarterBegin, month=12),
"Q-JAN": partial(QuarterEnd, month=1),
"Q-FEB": partial(QuarterEnd, month=2),
"Q-MAR": partial(QuarterEnd, month=3),
"Q-APR": partial(QuarterEnd, month=4),
"Q-MAY": partial(QuarterEnd, month=5),
"Q-JUN": partial(QuarterEnd, month=6),
"Q-JUL": partial(QuarterEnd, month=7),
"Q-AUG": partial(QuarterEnd, month=8),
"Q-SEP": partial(QuarterEnd, month=9),
"Q-OCT": partial(QuarterEnd, month=10),
"Q-NOV": partial(QuarterEnd, month=11),
"Q-DEC": partial(QuarterEnd, month=12),
}
_FREQUENCY_CONDITION = "|".join(_FREQUENCIES.keys())
_PATTERN = fr"^((?P<multiple>\d+)|())(?P<freq>({_FREQUENCY_CONDITION}))$"
# pandas defines these offsets as "Tick" objects, which for instance have
# distinct behavior from monthly or longer frequencies in resample.
CFTIME_TICKS = (Day, Hour, Minute, Second)
def to_offset(freq):
"""Convert a frequency string to the appropriate subclass of
BaseCFTimeOffset."""
if isinstance(freq, BaseCFTimeOffset):
return freq
else:
try:
freq_data = re.match(_PATTERN, freq).groupdict()
except AttributeError:
raise ValueError("Invalid frequency string provided")
freq = freq_data["freq"]
multiples = freq_data["multiple"]
if multiples is None:
multiples = 1
else:
multiples = int(multiples)
return _FREQUENCIES[freq](n=multiples)
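# Illustrative conversions: to_offset("5H") -> Hour(n=5),
# to_offset("2QS-APR") -> QuarterBegin(n=2, month=4), and passing an
# existing BaseCFTimeOffset instance returns it unchanged.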
def to_cftime_datetime(date_str_or_date, calendar=None):
import cftime
if isinstance(date_str_or_date, str):
if calendar is None:
raise ValueError(
"If converting a string to a cftime.datetime object, "
"a calendar type must be provided"
)
date, _ = _parse_iso8601_with_reso(get_date_type(calendar), date_str_or_date)
return date
elif isinstance(date_str_or_date, cftime.datetime):
return date_str_or_date
else:
raise TypeError(
"date_str_or_date must be a string or a "
"subclass of cftime.datetime. Instead got "
"{!r}.".format(date_str_or_date)
)
def normalize_date(date):
"""Round datetime down to midnight."""
return date.replace(hour=0, minute=0, second=0, microsecond=0)
def _maybe_normalize_date(date, normalize):
"""Round datetime down to midnight if normalize is True."""
if normalize:
return normalize_date(date)
else:
return date
def _generate_linear_range(start, end, periods):
"""Generate an equally-spaced sequence of cftime.datetime objects between
and including two dates (whose length equals the number of periods)."""
import cftime
total_seconds = (end - start).total_seconds()
values = np.linspace(0.0, total_seconds, periods, endpoint=True)
units = "seconds since {}".format(format_cftime_datetime(start))
calendar = start.calendar
return cftime.num2date(
values, units=units, calendar=calendar, only_use_cftime_datetimes=True
)
def _generate_range(start, end, periods, offset):
"""Generate a regular range of cftime.datetime objects with a
given time offset.
Adapted from pandas.tseries.offsets.generate_range.
Parameters
----------
start : cftime.datetime, or None
Start of range
end : cftime.datetime, or None
End of range
periods : int, or None
Number of elements in the sequence
offset : BaseCFTimeOffset
An offset class designed for working with cftime.datetime objects
Returns
-------
A generator object
"""
if start:
start = offset.rollforward(start)
if end:
end = offset.rollback(end)
if periods is None and end < start:
end = None
periods = 0
if end is None:
end = start + (periods - 1) * offset
if start is None:
start = end - (periods - 1) * offset
current = start
if offset.n >= 0:
while current <= end:
yield current
next_date = current + offset
if next_date <= current:
raise ValueError(f"Offset {offset} did not increment date")
current = next_date
else:
while current >= end:
yield current
next_date = current + offset
if next_date >= current:
raise ValueError(f"Offset {offset} did not decrement date")
current = next_date
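# Illustrative: list(_generate_range(start, None, 3, MonthBegin(1))) yields
# three month-start dates, beginning at `start` rolled forward to the
# nearest month start.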
def cftime_range(
start=None,
end=None,
periods=None,
freq="D",
normalize=False,
name=None,
closed=None,
calendar="standard",
):
"""Return a fixed frequency CFTimeIndex.
Parameters
----------
start : str or cftime.datetime, optional
Left bound for generating dates.
end : str or cftime.datetime, optional
Right bound for generating dates.
periods : integer, optional
Number of periods to generate.
freq : str, default 'D', BaseCFTimeOffset, or None
Frequency strings can have multiples, e.g. '5H'.
normalize : bool, default False
Normalize start/end dates to midnight before generating date range.
name : str, default None
Name of the resulting index
closed : {None, 'left', 'right'}, optional
Make the interval closed with respect to the given frequency to the
'left', 'right', or both sides (None, the default).
calendar : str
Calendar type for the datetimes (default 'standard').
Returns
-------
CFTimeIndex
Notes
-----
This function is an analog of ``pandas.date_range`` for use in generating
sequences of ``cftime.datetime`` objects. It supports most of the
features of ``pandas.date_range`` (e.g. specifying how the index is
``closed`` on either side, or whether or not to ``normalize`` the start and
end bounds); however, there are some notable exceptions:
- You cannot specify a ``tz`` (time zone) argument.
- Start or end dates specified as partial-datetime strings must use the
`ISO-8601 format <https://en.wikipedia.org/wiki/ISO_8601>`_.
- It supports many, but not all, frequencies supported by
``pandas.date_range``. For example it does not currently support any of
the business-related, semi-monthly, or sub-second frequencies.
- Compound sub-monthly frequencies are not supported, e.g. '1H1min', as
these can easily be written in terms of the finest common resolution,
e.g. '61min'.
Valid simple frequency strings for use with ``cftime``-calendars include
any multiples of the following.
+--------+--------------------------+
| Alias | Description |
+========+==========================+
| A, Y | Year-end frequency |
+--------+--------------------------+
| AS, YS | Year-start frequency |
+--------+--------------------------+
| Q | Quarter-end frequency |
+--------+--------------------------+
| QS | Quarter-start frequency |
+--------+--------------------------+
| M | Month-end frequency |
+--------+--------------------------+
| MS | Month-start frequency |
+--------+--------------------------+
| D | Day frequency |
+--------+--------------------------+
| H | Hour frequency |
+--------+--------------------------+
| T, min | Minute frequency |
+--------+--------------------------+
| S | Second frequency |
+--------+--------------------------+
Any multiples of the following anchored offsets are also supported.
+----------+--------------------------------------------------------------------+
| Alias | Description |
+==========+====================================================================+
| A(S)-JAN | Annual frequency, anchored at the end (or beginning) of January |
+----------+--------------------------------------------------------------------+
| A(S)-FEB | Annual frequency, anchored at the end (or beginning) of February |
+----------+--------------------------------------------------------------------+
| A(S)-MAR | Annual frequency, anchored at the end (or beginning) of March |
+----------+--------------------------------------------------------------------+
| A(S)-APR | Annual frequency, anchored at the end (or beginning) of April |
+----------+--------------------------------------------------------------------+
| A(S)-MAY | Annual frequency, anchored at the end (or beginning) of May |
+----------+--------------------------------------------------------------------+
| A(S)-JUN | Annual frequency, anchored at the end (or beginning) of June |
+----------+--------------------------------------------------------------------+
| A(S)-JUL | Annual frequency, anchored at the end (or beginning) of July |
+----------+--------------------------------------------------------------------+
| A(S)-AUG | Annual frequency, anchored at the end (or beginning) of August |
+----------+--------------------------------------------------------------------+
| A(S)-SEP | Annual frequency, anchored at the end (or beginning) of September |
+----------+--------------------------------------------------------------------+
| A(S)-OCT | Annual frequency, anchored at the end (or beginning) of October |
+----------+--------------------------------------------------------------------+
| A(S)-NOV | Annual frequency, anchored at the end (or beginning) of November |
+----------+--------------------------------------------------------------------+
| A(S)-DEC | Annual frequency, anchored at the end (or beginning) of December |
+----------+--------------------------------------------------------------------+
| Q(S)-JAN | Quarter frequency, anchored at the end (or beginning) of January |
+----------+--------------------------------------------------------------------+
| Q(S)-FEB | Quarter frequency, anchored at the end (or beginning) of February |
+----------+--------------------------------------------------------------------+
| Q(S)-MAR | Quarter frequency, anchored at the end (or beginning) of March |
+----------+--------------------------------------------------------------------+
| Q(S)-APR | Quarter frequency, anchored at the end (or beginning) of April |
+----------+--------------------------------------------------------------------+
| Q(S)-MAY | Quarter frequency, anchored at the end (or beginning) of May |
+----------+--------------------------------------------------------------------+
| Q(S)-JUN | Quarter frequency, anchored at the end (or beginning) of June |
+----------+--------------------------------------------------------------------+
| Q(S)-JUL | Quarter frequency, anchored at the end (or beginning) of July |
+----------+--------------------------------------------------------------------+
| Q(S)-AUG | Quarter frequency, anchored at the end (or beginning) of August |
+----------+--------------------------------------------------------------------+
| Q(S)-SEP | Quarter frequency, anchored at the end (or beginning) of September |
+----------+--------------------------------------------------------------------+
| Q(S)-OCT | Quarter frequency, anchored at the end (or beginning) of October |
+----------+--------------------------------------------------------------------+
| Q(S)-NOV | Quarter frequency, anchored at the end (or beginning) of November |
+----------+--------------------------------------------------------------------+
| Q(S)-DEC | Quarter frequency, anchored at the end (or beginning) of December |
+----------+--------------------------------------------------------------------+
Finally, the following calendar aliases are supported.
+--------------------------------+---------------------------------------+
| Alias | Date type |
+================================+=======================================+
| standard, gregorian | ``cftime.DatetimeGregorian`` |
+--------------------------------+---------------------------------------+
| proleptic_gregorian | ``cftime.DatetimeProlepticGregorian`` |
+--------------------------------+---------------------------------------+
| noleap, 365_day | ``cftime.DatetimeNoLeap`` |
+--------------------------------+---------------------------------------+
| all_leap, 366_day | ``cftime.DatetimeAllLeap`` |
+--------------------------------+---------------------------------------+
| 360_day | ``cftime.Datetime360Day`` |
+--------------------------------+---------------------------------------+
| julian | ``cftime.DatetimeJulian`` |
+--------------------------------+---------------------------------------+
Examples
--------
This function returns a ``CFTimeIndex``, populated with ``cftime.datetime``
objects associated with the specified calendar type, e.g.
>>> xr.cftime_range(start="2000", periods=6, freq="2MS", calendar="noleap")
CFTimeIndex([2000-01-01 00:00:00, 2000-03-01 00:00:00, 2000-05-01 00:00:00,
2000-07-01 00:00:00, 2000-09-01 00:00:00, 2000-11-01 00:00:00],
dtype='object')
As in the standard pandas function, three of the ``start``, ``end``,
``periods``, or ``freq`` arguments must be specified at a given time, with
the other set to ``None``. See the `pandas documentation
<https://pandas.pydata.org/pandas-docs/stable/generated/pandas.date_range.html#pandas.date_range>`_
for more examples of the behavior of ``date_range`` with each of the
parameters.
See Also
--------
pandas.date_range
"""
# Adapted from pandas.core.indexes.datetimes._generate_range.
if count_not_none(start, end, periods, freq) != 3:
raise ValueError(
"Of the arguments 'start', 'end', 'periods', and 'freq', three "
"must be specified at a time."
)
if start is not None:
start = to_cftime_datetime(start, calendar)
start = _maybe_normalize_date(start, normalize)
if end is not None:
end = to_cftime_datetime(end, calendar)
end = _maybe_normalize_date(end, normalize)
if freq is None:
dates = _generate_linear_range(start, end, periods)
else:
offset = to_offset(freq)
dates = np.array(list(_generate_range(start, end, periods, offset)))
left_closed = False
right_closed = False
if closed is None:
left_closed = True
right_closed = True
elif closed == "left":
left_closed = True
elif closed == "right":
right_closed = True
else:
raise ValueError("Closed must be either 'left', 'right' or None")
if not left_closed and len(dates) and start is not None and dates[0] == start:
dates = dates[1:]
if not right_closed and len(dates) and end is not None and dates[-1] == end:
dates = dates[:-1]
return CFTimeIndex(dates, name=name)<|fim▁end|> | # We follow that behavior here.
_default_month = 3 |
<|file_name|>lunche24.py<|end_file_name|><|fim▁begin|>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Lunch (e24.no)"
language = "no"
url = "http://www.e24.no/lunch/"
start_date = "2009-10-21"
rights = "Børge Lund"
class Crawler(CrawlerBase):
history_capable_date = "2012-11-02"
schedule = "Mo,Tu,We,Th,Fr,Sa"
time_zone = "Europe/Oslo"
<|fim▁hole|> def crawl(self, pub_date):
url = "http://static.e24.no/images/comics/lunch_%s.gif" % (
pub_date.strftime("%Y%m%d")
)
return CrawlerImage(url)<|fim▁end|> | |
<|file_name|>user.cpp<|end_file_name|><|fim▁begin|>#include <boost/lexical_cast.hpp>
#include <disccord/models/user.hpp>
namespace disccord
{
namespace models
{
user::user()
: username(""), avatar(), email(), discriminator(0),
bot(false), mfa_enabled(), verified()
{ }
user::~user()
{ }
void user::decode(web::json::value json)
{
entity::decode(json);
username = json.at("username").as_string();
// HACK: use boost::lexical_cast here since it safely
// validates values
auto str_js = json.at("discriminator");
discriminator = boost::lexical_cast<uint16_t>(str_js.as_string());
#define get_field(var, conv) \
if (json.has_field(#var)) { \
auto field = json.at(#var); \
if (!field.is_null()) { \
var = decltype(var)(field.conv()); \<|fim▁hole|> var = decltype(var)(); \
}
get_field(avatar, as_string);
bot = json.at("bot").as_bool();
//get_field(bot, as_bool);
get_field(mfa_enabled, as_bool);
get_field(verified, as_bool);
get_field(email, as_string);
#undef get_field
}
void user::encode_to(std::unordered_map<std::string,
web::json::value> &info)
{
entity::encode_to(info);
info["username"] = web::json::value(get_username());
info["discriminator"] =
web::json::value(std::to_string(get_discriminator()));
if (get_avatar().is_specified())
info["avatar"] = get_avatar();
info["bot"] = web::json::value(get_bot());
if (get_mfa_enabled().is_specified())
info["mfa_enabled"] = get_mfa_enabled();
if (get_verified().is_specified())
info["verified"] = get_verified();
if (get_email().is_specified())
info["email"] = get_email();
}
#define define_get_method(field_name) \
decltype(user::field_name) user::get_##field_name() { \
return field_name; \
}
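        // Expansion example (illustrative): define_get_method(username)
        // yields `decltype(user::username) user::get_username() { return username; }`,
        // so each getter's return type is deduced from the wrapped member.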
define_get_method(username)
define_get_method(discriminator)
define_get_method(avatar)
define_get_method(bot)
define_get_method(mfa_enabled)
define_get_method(verified)
define_get_method(email)
util::optional<std::string> user::get_avatar_url()
{
if (get_avatar().is_specified())
{
std::string url = "https://cdn.discordapp.com/avatars/" +
std::to_string(get_id()) + "/" +
get_avatar().get_value()+".png?size=1024";
return util::optional<std::string>(url);
}
else
return util::optional<std::string>::no_value();
}
#undef define_get_method
}
}<|fim▁end|> | } else { \
var = decltype(var)::no_value(); \
} \
} else { \ |
<|file_name|>ioreg.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Gamari <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*!
## I/O register interface
On most embedded platforms interaction with hardware peripherals
occurs through memory-mapped registers. This crate provides a syntax
extension for `rustc` to generate convenient, type-safe register
interfaces from a minimal definition.
### Concepts
A *register* is a group of bits (typically a word, although on some
platforms smaller). By manipulating the bits of a register one can
affect the state of the associated peripheral.
### Example register definition
Let's consider a register block for a simple UART-like peripheral. The
documentation for the peripheral will likely have a table defining the
interface resembling the following,
```notrust
offset name description
─────── ──────── ──────────────────────────
0x0 CR Configuration register
bit 0 RXE Receive enable
bit 1 TXE Transmit enable
bit 2 RXIE Receive interrupt enable
bit 3 TXIE Transmit interrupt enable
bit 12:4 BR Baudrate
bit 16:14 PARITY Parity
0x0 No parity
0x1 Reserved
0x2 Even parity
0x3 Odd parity
0x4 SR Status register
bit 0 RXNE Receive data register not empty flag (read-only)
bit 1 TXE Transmit data register empty flag (read-only)
bit 2 FE Framing error flag (set to clear)
0x8 DR Data register
bits 7:0 D Read returns received data
Write transmits data
```
The syntax extension is invoked through the `ioregs!` macro. A
register definition for the above peripheral might look like this,
```
ioregs!(UART = {
0x0 => reg32 cr {
0 => rxe,
1 => txe,
2 => rxie,
3 => txie,
4..12 => br,
14..16 => parity {
0x0 => NoParity,
0x2 => EvenParity,
0x3 => OddParity,
}
}
0x4 => reg32 sr {
0 => rxne: ro,
1 => txe: ro,
2 => fe: set_to_clear,
}
0x8 => reg32 dr {
0..7 => d
}
})
```
Here we've defined a register block called `UART` consisting of
three registers: `cr`, `sr`, and `dr`. Each register definition
consists of an offset from the beginning of the register block,
a register type giving the width of the register (`reg32` in this
case), a name, and a list of fields.
The `cr` register has four boolean flags, an integer
field `br`, and a field `parity` with three possible values
(`NoParity`, `EvenParity`, and `OddParity`). Each field is defined by
a bit or bit range, a name, some optional modifiers (e.g. `ro` in the
case of `rxne`), and an optional list of values.
This register definition will produce a variety of types, along with
associated accessor methods for convenient, safe manipulation of the
described registers. In the process of generating these, `ioregs!`
will perform a variety of sanity checks (e.g. ensuring that registers
and bitfields are free of overlap).
#### Documenting register definitions
It is highly recommended that register definitions include
docstrings. Registers, fields, and `enum` values can all be annotated
with docstrings with the typical Rust doc comment syntax. Both outer
(`/// comment`) and inner (`//! comment`) comments are accepted. Inner
comments apply to the item to which the current block belongs whereas
outer comments apply to the item that follows. In addition,
trailing comments are supported with the `//=` syntax. These apply
to the preceding item, allowing definitions and associated comments
to inhabit the same line. Multiple successive comments of the same
type will be concatenated together into a single doc comment.
For instance, we might document the above example as follows,
```
ioregs!(UART = {
/// Control register
/// Here is some discussion of the function of the `cr` register.
0x0 => reg32 cr {
0 => rxe, //= Receive enable
1 => txe, //= Transmit enable
2 => rxie, //= Receive interrupt enable
3 => txie, //= Transmit interrupt enable
4..12 => br, //= Baud rate
14..16 => parity { //! Parity selection
0x0 => NoParity, //= No parity
0x2 => EvenParity, //= Even parity
0x3 => OddParity, //= Odd parity
}
}
...
})
```
#### Nesting register blocks
In addition to primitive register types (e.g. `reg32`), one can also
nest groups of logically related registers. For instance, in the case
of a DMA peripheral it is common that the same block of registers will
be replicated, one for each DMA channel. This can be accomplished with
`ioregs!` as follows,
```
ioregs!(DMA = {
0x0 => reg32 cr { ... }
0x10 => group channel[4] {
0x0 => reg32 cr { ... }
0x4 => reg32 sr { ... }
}
0x30 => reg32 sr { ... }
})
```
This will produce the following layout in memory,
```notrust<|fim▁hole|>──────── ──────────────
0x0 cr
0x10 channel[0].cr
0x14 channel[0].sr
0x18 channel[1].cr
0x1c channel[1].sr
0x20 channel[2].cr
0x24 channel[2].sr
0x28 channel[3].cr
0x2c channel[3].sr
0x30 sr
```
### What is produced
The `ioregs!` extension produces a variety of types and methods for
each register and field. Let's start by examining the top-level types
representing the structure of the interface.
```
pub enum UART_cr_parity {
NoParity = 0, EvenParity = 2, OddParity = 3,
}
pub struct UART_cr { ... }
pub struct UART_sr { ... }
pub struct UART_dr { ... }
pub struct UART {
pub cr: UART_cr,
pub sr: UART_sr,
pub dr: UART_dr,
}
```
The `UART` struct is the "entry-point" into the interface and is
ultimately what will be instantiated to represent the peripheral's
register window, typically as a `static extern` item,
```
extern { pub static UART: UART; }
```
The register structs (`UART_cr`, `UART_sr`, and `UART_dr`)
have no user visible members but expose a variety of methods. Let's
look at `cr` in particular,
```
impl UART_cr {
pub fn get(&self) -> UART_cr_Get { ... }
pub fn set_rxe(&self, new_value: bool) -> UART_cr_Update { ... }
pub fn rxe(&self) -> bool { ... }
// similar methods for `txe`, `rxie`, `txie`
pub fn set_br(&self, new_value: u32) -> UART_cr_Update { ... }
pub fn br(&self) -> u32 { ... }
pub fn set_parity(&self, new_value: UART_cr_parity) -> UART_cr_Update { ... }
pub fn parity(&self) -> UART_cr_parity { ... }
}
```
Here we see each field has a corresponding "get" function (e.g. `rxe`,
`br`, and `parity`) as well as a "set" function. Note that the set
function returns a `UART_cr_Update` object. This object mirrors the
setter methods of `UART_cr`, collecting multiple field updates within
a register, performing them on destruction with the `Drop` trait,
```
pub struct UART_cr_Update { ... }
impl Drop for UART_cr_Update { ... }
impl UART_cr_Update {
pub fn set_rxe<'a>(&'a mut self, new_value: bool) -> &'a mut UART_cr_Update { ... }
pub fn set_txe<'a>(&'a mut self, new_value: bool) -> &'a mut UART_cr_Update { ... }
...
}
```
As the set methods return references to `self` they can be easily
chained together. For instance, we can update the `rxe` and `txe`
fields of the `cr` register atomically,
```
UART.cr.set_rxe(true).set_txe(false);
```
In addition to get and set methods, `UART_cr` also implements a `get`
method which returns a `UART_cr_Get` object mirroring the get methods
of `UART_cr`. This object captures the state of the register allowing
field values to be later atomically queried,
```
let cr: UART_cr_Get = UART.cr.get();
format!("txe={}, rxe={}, br={}", cr.txe(), cr.rxe(), cr.br())
```
In the case of read-only (resp. write-only) fields the set (resp. get)
method is omitted. In the case of `set_to_clear` fields a `clear`
method is instead produced in place of `set`. For instance, in the
case of the `sr` register's `fe` flag,
```
pub fn fe(self: &UART_sr_Get) -> bool { ... }
pub fn clear_fe(self: &UART_sr_Update) -> UART_sr_Update { ... }
```
### Informal grammar
In the below discussion `THING, ...` will denote a list of one or more
`THING`s. The `THING`s must be comma separated except when ending
with a brace-enclosed block. Optional elements are enclosed in `⟦...⟧`
brackets.
The `ioregs!` macro expects a definition of the form,
```
ioregs!(IDENT = { REG, ... })
```
Where a `REG` is either a register group,
```notrust
OFFSET => group IDENT⟦[COUNT]⟧ { REG, ... }
```
or a primitive register,
```notrust
OFFSET => TYPE IDENT⟦[COUNT]⟧ { FIELD, ... }
```
`COUNT` is an integer count and a register `TYPE` is one of `reg8` (a
one byte wide register), `reg16` (two bytes wide), or `reg32` (four
bytes wide).
A field is given by
```notrust
BITS => IDENT⟦[COUNT]⟧ ⟦: MODIFIER⟧ ⟦{ VALUE, ... }⟧
```
where `BITS` is either an inclusive range of integers (`N..M`) or a
single integer (shorthand for `N..N`). If a list of values is given
the field is of an enumerated type. Otherwise single bit fields are
of type `bool` and wider fields unsigned integers (in particular, of
the same width as the containing register).
A `MODIFIER` is one of `rw` (read/write), `ro` (read-only), `wo`
(write-only), or `set_to_clear` (a flag which can be cleared by
setting to one).
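For example, modifiers are written after the field name (an illustrative
fragment, not part of the UART definition above),
```notrust
0 => ready: ro, //= read-only status flag
1 => start: wo, //= write-only command trigger
2 => err: set_to_clear, //= cleared by writing one
```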
A `VALUE` is given by,
```notrust
N => NAME
```
*/
#![feature(quote, plugin_registrar)]
#![crate_name="ioreg"]
#![crate_type="dylib"]
#![allow(unstable)]
extern crate rustc;
extern crate syntax;
extern crate serialize;
use rustc::plugin::Registry;
use syntax::ast;
use syntax::ptr::P;
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, MacResult};
use syntax::util::small_vector::SmallVector;
pub mod node;
pub mod parser;
pub mod builder;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("ioregs", macro_ioregs);
}
pub fn macro_ioregs(cx: &mut ExtCtxt, _: Span, tts: &[ast::TokenTree])
-> Box<MacResult+'static> {
match parser::Parser::new(cx, tts).parse_ioregs() {
Some(group) => {
let mut builder = builder::Builder::new();
let items = builder.emit_items(cx, group);
MacItems::new(items)
},
None => {
panic!();
}
}
}
pub struct MacItems {
items: Vec<P<ast::Item>>
}
impl MacItems {
pub fn new(items: Vec<P<ast::Item>>) -> Box<MacResult+'static> {
Box::new(MacItems { items: items })
}
}
impl MacResult for MacItems {
fn make_items(self: Box<MacItems>) -> Option<SmallVector<P<ast::Item>>> {
Some(SmallVector::many(self.items.clone()))
}
}<|fim▁end|> | address register |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import hashlib
import json
import sys
import traceback
from datetime import datetime, timedelta
from functools import wraps
from uuid import uuid4
import newrelic.agent
import waffle
from constance import config
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.db.models import signals
from django.utils.decorators import available_attrs
from django.utils.functional import cached_property
from django.utils.translation import ugettext, ugettext_lazy as _
from pyquery import PyQuery
from taggit.managers import TaggableManager
from taggit.models import ItemBase, TagBase
from taggit.utils import edit_string_for_tags, parse_tags
from tidings.models import NotificationsMixin
from kuma.core.cache import memcache
from kuma.core.exceptions import ProgrammingError
from kuma.core.i18n import get_language_mapping
from kuma.core.urlresolvers import reverse
from kuma.search.decorators import register_live_index
from kuma.spam.models import AkismetSubmission, SpamAttempt
from . import kumascript
from .constants import (DEKI_FILE_URL, DOCUMENT_LAST_MODIFIED_CACHE_KEY_TMPL,
KUMA_FILE_URL, REDIRECT_CONTENT, REDIRECT_HTML,
TEMPLATE_TITLE_PREFIX)
from .content import parse as parse_content
from .content import (Extractor, H2TOCFilter, H3TOCFilter, SectionTOCFilter,
get_content_sections, get_seo_description)
from .exceptions import (DocumentRenderedContentNotAvailable,
DocumentRenderingInProgress, PageMoveError,
SlugCollision, UniqueCollision)
from .jobs import DocumentContributorsJob, DocumentZoneStackJob
from .managers import (DeletedDocumentManager, DocumentAdminManager,
DocumentManager, RevisionIPManager,
TaggedDocumentManager, TransformManager)
from .signals import render_done
from .templatetags.jinja_helpers import absolutify
from .utils import tidy_content
def cache_with_field(field_name):
"""Decorator for generated content methods.
If the backing model field is null, or kwarg force_fresh is True, call the
decorated method to generate and return the content.
Otherwise, just return the value in the backing model field.
"""
def decorator(fn):
@wraps(fn, assigned=available_attrs(fn))
def wrapper(self, *args, **kwargs):
force_fresh = kwargs.pop('force_fresh', False)
# Try getting the value using the DB field.
field_val = getattr(self, field_name)
if field_val is not None and not force_fresh:
return field_val
# DB field is blank, or we're forced to generate it fresh.
field_val = fn(self, force_fresh=force_fresh)
setattr(self, field_name, field_val)
return field_val
return wrapper
return decorator
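# Illustrative usage (mirroring the real generated-content methods on
# Document below):
#
#     @cache_with_field('toc_html')
#     def get_toc_html(self, *args, **kwargs):
#         ...  # expensive generation; runs only when toc_html is NULL
#              # or when called with force_fresh=True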
def _inherited(parent_attr, direct_attr):
"""Return a descriptor delegating to an attr of the original document.
If `self` is a translation, the descriptor delegates to the attribute
`parent_attr` from the original document. Otherwise, it delegates to the
attribute `direct_attr` from `self`.
Use this only on a reference to another object, like a ManyToMany or a
ForeignKey. Using it on a normal field won't work well, as it'll preclude
the use of that field in QuerySet field lookups. Also, ModelForms that are
passed instance=this_obj won't see the inherited value.
"""
getter = lambda self: (getattr(self.parent, parent_attr)
if self.parent and self.parent.id != self.id
else getattr(self, direct_attr))
setter = lambda self, val: (setattr(self.parent, parent_attr, val)
if self.parent and self.parent.id != self.id
else setattr(self, direct_attr, val))
return property(getter, setter)
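# Illustrative (hypothetical field names): on a Document translation,
#     display_title = _inherited('title', 'title_local')
# would read and write the parent document's `title`, while a document in
# the default locale falls back to its own `title_local` attribute.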
def valid_slug_parent(slug, locale):
slug_bits = slug.split('/')
slug_bits.pop()
parent = None
if slug_bits:
parent_slug = '/'.join(slug_bits)
try:
parent = Document.objects.get(locale=locale, slug=parent_slug)
except Document.DoesNotExist:
raise Exception(
ugettext('Parent %s does not exist.' % (
'%s/%s' % (locale, parent_slug))))
return parent
class DocumentTag(TagBase):
"""A tag indexing a document"""
class Meta:
verbose_name = _('Document Tag')
verbose_name_plural = _('Document Tags')
def tags_for(cls, model, instance=None, **extra_filters):
"""
Sadly copied from taggit to work around the issue of not being
able to use the TaggedItemBase class that has tag field already
defined.
"""
kwargs = extra_filters or {}
if instance is not None:
kwargs.update({
'%s__content_object' % cls.tag_relname(): instance
})
return cls.tag_model().objects.filter(**kwargs)
kwargs.update({
'%s__content_object__isnull' % cls.tag_relname(): False
})
return cls.tag_model().objects.filter(**kwargs).distinct()
class TaggedDocument(ItemBase):
"""Through model, for tags on Documents"""
content_object = models.ForeignKey('Document')
tag = models.ForeignKey(DocumentTag, related_name="%(app_label)s_%(class)s_items")
objects = TaggedDocumentManager()
@classmethod
def tags_for(cls, *args, **kwargs):
return tags_for(cls, *args, **kwargs)
class DocumentAttachment(models.Model):
"""
Intermediary between Documents and Attachments. Allows storing the
user who attached a file to a document, and a (unique for that
document) name for referring to the file from the document.
"""
file = models.ForeignKey(
'attachments.Attachment',
related_name='document_attachments',
)
document = models.ForeignKey(
'wiki.Document',
related_name='attached_files',
)
attached_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True)
name = models.TextField()
# whether or not this attachment was uploaded for the document
is_original = models.BooleanField(
verbose_name=_('uploaded to the document'),
default=False,
)
# whether or not this attachment is linked in the document's content
is_linked = models.BooleanField(
verbose_name=_('linked in the document content'),
default=False,
)
class Meta:
db_table = 'attachments_documentattachment'
def __unicode__(self):
return u'"%s" for document "%s"' % (self.file, self.document)
def clean(self):
if self.pk and (self.document.files.through.objects.exclude(pk=self.pk)
.exists()):
raise ValidationError(
_("Attachment %(attachment_id)s can't be attached "
"multiple times to document %(document_id)s") %
{'attachment_id': self.pk, 'document_id': self.document.pk}
)
@register_live_index
class Document(NotificationsMixin, models.Model):
"""A localized knowledgebase document, not revision-specific."""
TOC_FILTERS = {
1: SectionTOCFilter,
2: H2TOCFilter,
3: H3TOCFilter,
4: SectionTOCFilter
}
title = models.CharField(max_length=255, db_index=True)
slug = models.CharField(max_length=255, db_index=True)
# NOTE: Documents are indexed by tags, but tags are edited in Revisions.
# Also, using a custom through table to isolate Document tags from those
# used in other models and apps. (Works better than namespaces, for
# completion and such.)
tags = TaggableManager(through=TaggedDocument)
# Is this document a template or not?
is_template = models.BooleanField(default=False, editable=False,
db_index=True)
# Is this a redirect or not?
is_redirect = models.BooleanField(default=False, editable=False,
db_index=True)
# Is this document localizable or not?
is_localizable = models.BooleanField(default=True, db_index=True)
locale = models.CharField(
max_length=7,
choices=settings.LANGUAGES,
default=settings.WIKI_DEFAULT_LANGUAGE,
db_index=True,
)
# Latest approved revision. L10n dashboard depends on this being so (rather
# than being able to set it to earlier approved revisions).
current_revision = models.ForeignKey(
'Revision',
null=True,
related_name='current_for+',
)
# The Document I was translated from. NULL if this doc is in the default
# locale or it is nonlocalizable. TODO: validate against
# settings.WIKI_DEFAULT_LANGUAGE.
parent = models.ForeignKey(
'self',
related_name='translations',
null=True,
blank=True,
)
parent_topic = models.ForeignKey(
'self',
related_name='children',
null=True,
blank=True,
)
# The files attached to the document, represented by a custom intermediate
# model so we can store some metadata about the relation
files = models.ManyToManyField(
'attachments.Attachment',
through=DocumentAttachment,
)
# JSON representation of Document for API results, built on save
json = models.TextField(editable=False, blank=True, null=True)
# Raw HTML of approved revision's wiki markup
html = models.TextField(editable=False)
# Cached result of kumascript and other offline processors (if any)
rendered_html = models.TextField(editable=False, blank=True, null=True)
# Errors (if any) from the last rendering run
rendered_errors = models.TextField(editable=False, blank=True, null=True)
# Whether or not to automatically defer rendering of this page to a queued
# offline task. Generally used for complex pages that need time
defer_rendering = models.BooleanField(default=False, db_index=True)
# Timestamp when this document was last scheduled for a render
render_scheduled_at = models.DateTimeField(null=True, db_index=True)
# Timestamp when a render for this document was last started
render_started_at = models.DateTimeField(null=True, db_index=True)
# Timestamp when this document was last rendered
last_rendered_at = models.DateTimeField(null=True, db_index=True)
# Maximum age (in seconds) before this document needs re-rendering
render_max_age = models.IntegerField(blank=True, null=True)
# Time after which this document needs re-rendering
render_expires = models.DateTimeField(blank=True, null=True, db_index=True)
# Whether this page is deleted.
deleted = models.BooleanField(default=False, db_index=True)
    # Last modified time for the document. Should be equal to or greater than
    # the current revision's created field.
modified = models.DateTimeField(auto_now=True, null=True, db_index=True)
body_html = models.TextField(editable=False, blank=True, null=True)
quick_links_html = models.TextField(editable=False, blank=True, null=True)
zone_subnav_local_html = models.TextField(editable=False,
blank=True, null=True)
toc_html = models.TextField(editable=False, blank=True, null=True)
summary_html = models.TextField(editable=False, blank=True, null=True)
summary_text = models.TextField(editable=False, blank=True, null=True)
uuid = models.UUIDField(default=uuid4, editable=False)
class Meta(object):
unique_together = (
('parent', 'locale'),
('slug', 'locale'),
)
permissions = (
('view_document', 'Can view document'),
('add_template_document', 'Can add Template:* document'),
('change_template_document', 'Can change Template:* document'),
('move_tree', 'Can move a tree of documents'),
('purge_document', 'Can permanently delete document'),
('restore_document', 'Can restore deleted document'),
)
objects = DocumentManager()
deleted_objects = DeletedDocumentManager()
admin_objects = DocumentAdminManager()
def __unicode__(self):
return u'%s (%s)' % (self.get_absolute_url(), self.title)
@cache_with_field('body_html')
def get_body_html(self, *args, **kwargs):
        html = self.rendered_html or self.html
sections_to_hide = ('Quick_Links', 'Subnav')
doc = parse_content(html)
for sid in sections_to_hide:
doc = doc.replaceSection(sid, '<!-- -->')
doc.injectSectionIDs()
doc.annotateLinks(base_url=settings.SITE_URL)
return doc.serialize()
@cache_with_field('quick_links_html')
def get_quick_links_html(self, *args, **kwargs):
return self.get_section_content('Quick_Links')
@cache_with_field('zone_subnav_local_html')
def get_zone_subnav_local_html(self, *args, **kwargs):
return self.get_section_content('Subnav')
@cache_with_field('toc_html')
def get_toc_html(self, *args, **kwargs):
if not self.current_revision:
return ''
toc_depth = self.current_revision.toc_depth
if not toc_depth:
return ''
        html = self.rendered_html or self.html
return (parse_content(html)
.injectSectionIDs()
.filter(self.TOC_FILTERS[toc_depth])
.serialize())
@cache_with_field('summary_html')
def get_summary_html(self, *args, **kwargs):
return self.get_summary(strip_markup=False)
@cache_with_field('summary_text')
def get_summary_text(self, *args, **kwargs):
return self.get_summary(strip_markup=True)
def regenerate_cache_with_fields(self):
"""Regenerate fresh content for all the cached fields"""
# TODO: Maybe @cache_with_field can build a registry over which this
# method can iterate?
self.get_body_html(force_fresh=True)
self.get_quick_links_html(force_fresh=True)
self.get_zone_subnav_local_html(force_fresh=True)
self.get_toc_html(force_fresh=True)
self.get_summary_html(force_fresh=True)
self.get_summary_text(force_fresh=True)
def get_zone_subnav_html(self):
"""
Search from self up through DocumentZone stack, returning the first
zone nav HTML found.
"""
src = self.get_zone_subnav_local_html()
if src:
return src
for zone in DocumentZoneStackJob().get(self.pk):
src = zone.document.get_zone_subnav_local_html()
if src:
return src
def get_section_content(self, section_id, ignore_heading=True):
"""
Convenience method to extract the rendered content for a single section
"""
if self.rendered_html:
content = self.rendered_html
else:
content = self.html
return self.extract.section(content, section_id, ignore_heading)
def calculate_etag(self, section_id=None):
"""Calculate an etag-suitable hash for document content or a section"""
if not section_id:
content = self.html
else:
content = self.extract.section(self.html, section_id)
return '"%s"' % hashlib.sha1(content.encode('utf8')).hexdigest()
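    # A hedged usage sketch (the request/response handling is an assumption,
    # not part of this module): a view can answer conditional requests with
    #   etag = doc.calculate_etag(section_id='Quick_Links')
    #   if request.META.get('HTTP_IF_NONE_MATCH') == etag:
    #       return HttpResponseNotModified()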
def current_or_latest_revision(self):
"""Returns current revision if there is one, else the last created
revision."""
rev = self.current_revision
if not rev:
revs = self.revisions.order_by('-created')
if revs.exists():
rev = revs[0]
return rev
@property
def is_rendering_scheduled(self):
"""Does this have a rendering scheduled?"""
if not self.render_scheduled_at:
return False
# Check whether a scheduled rendering has waited for too long. Assume
# failure, in this case, and allow another scheduling attempt.
timeout = config.KUMA_DOCUMENT_RENDER_TIMEOUT
max_duration = timedelta(seconds=timeout)
duration = datetime.now() - self.render_scheduled_at
if duration > max_duration:
return False
if not self.last_rendered_at:
return True
return self.render_scheduled_at > self.last_rendered_at
@property
def is_rendering_in_progress(self):
"""Does this have a rendering in progress?"""
if not self.render_started_at:
# No start time, so False.
return False
# Check whether an in-progress rendering has gone on for too long.
# Assume failure, in this case, and allow another rendering attempt.
timeout = config.KUMA_DOCUMENT_RENDER_TIMEOUT
max_duration = timedelta(seconds=timeout)
duration = datetime.now() - self.render_started_at
if duration > max_duration:
return False
if not self.last_rendered_at:
# No rendering ever, so in progress.
return True
# Finally, if the render start is more recent than last completed
# render, then we have one in progress.
return self.render_started_at > self.last_rendered_at
@newrelic.agent.function_trace()
def get_rendered(self, cache_control=None, base_url=None):
"""Attempt to get rendered content for this document"""
# No rendered content yet, so schedule the first render.
if not self.rendered_html:
try:
self.schedule_rendering(cache_control, base_url)
except DocumentRenderingInProgress:
# Unable to trigger a rendering right now, so we bail.
raise DocumentRenderedContentNotAvailable
# If we have a cache_control directive, try scheduling a render.
if cache_control:
try:
self.schedule_rendering(cache_control, base_url)
except DocumentRenderingInProgress:
pass
# Parse JSON errors, if available.
errors = None
try:
            if self.rendered_errors:
                errors = json.loads(self.rendered_errors) or None
except ValueError:
pass
# If the above resulted in an immediate render, we might have content.
if not self.rendered_html:
if errors:
return ('', errors)
else:
# But, no such luck, so bail out.
raise DocumentRenderedContentNotAvailable
return (self.rendered_html, errors)
def schedule_rendering(self, cache_control=None, base_url=None):
"""
Attempt to schedule rendering. Honor the deferred_rendering field to
decide between an immediate or a queued render.
"""
# Avoid scheduling a rendering if already scheduled or in progress.
if self.is_rendering_scheduled or self.is_rendering_in_progress:
return False
# Note when the rendering was scheduled. Kind of a hack, doing a quick
# update and setting the local property rather than doing a save()
now = datetime.now()
Document.objects.filter(pk=self.pk).update(render_scheduled_at=now)
self.render_scheduled_at = now
if (waffle.switch_is_active('wiki_force_immediate_rendering') or
not self.defer_rendering):
# Attempt an immediate rendering.
self.render(cache_control, base_url)
else:
# Attempt to queue a rendering. If celery.conf.ALWAYS_EAGER is
# True, this is also an immediate rendering.
from . import tasks
tasks.render_document.delay(self.pk, cache_control, base_url)
def render(self, cache_control=None, base_url=None, timeout=None):
"""
Render content using kumascript and any other services necessary.
"""
if not base_url:
base_url = settings.SITE_URL
# Disallow rendering while another is in progress.
if self.is_rendering_in_progress:
raise DocumentRenderingInProgress
# Note when the rendering was started. Kind of a hack, doing a quick
# update and setting the local property rather than doing a save()
now = datetime.now()
Document.objects.filter(pk=self.pk).update(render_started_at=now)
self.render_started_at = now
# Perform rendering and update document
if not config.KUMASCRIPT_TIMEOUT:
# A timeout of 0 should shortcircuit kumascript usage.
self.rendered_html, self.rendered_errors = self.html, []
else:
self.rendered_html, errors = kumascript.get(self, cache_control,
base_url,
timeout=timeout)
            self.rendered_errors = json.dumps(errors) if errors else None
# Regenerate the cached content fields
self.regenerate_cache_with_fields()
# Finally, note the end time of rendering and update the document.
self.last_rendered_at = datetime.now()
# If this rendering took longer than we'd like, mark it for deferred
# rendering in the future.
timeout = config.KUMA_DOCUMENT_FORCE_DEFERRED_TIMEOUT
max_duration = timedelta(seconds=timeout)
duration = self.last_rendered_at - self.render_started_at
if duration >= max_duration:
self.defer_rendering = True
# TODO: Automatically clear the defer_rendering flag if the rendering
# time falls under the limit? Probably safer to require manual
# intervention to free docs from deferred jail.
if self.render_max_age:
# If there's a render_max_age, automatically update render_expires
self.render_expires = (datetime.now() +
timedelta(seconds=self.render_max_age))
else:
# Otherwise, just clear the expiration time as a one-shot
self.render_expires = None
self.save()
render_done.send(sender=self.__class__, instance=self)
def get_summary(self, strip_markup=True, use_rendered=True):
"""
Attempt to get the document summary from rendered content, with
fallback to raw HTML
"""
if use_rendered and self.rendered_html:
src = self.rendered_html
else:
src = self.html
return get_seo_description(src, self.locale, strip_markup)
def build_json_data(self):
        html = self.rendered_html or self.html
content = parse_content(html).injectSectionIDs().serialize()
sections = get_content_sections(content)
translations = []
if self.pk:
for translation in self.other_translations:
revision = translation.current_revision
if revision.summary:
summary = revision.summary
else:
summary = translation.get_summary(strip_markup=False)
translations.append({
'last_edit': revision.created.isoformat(),
'locale': translation.locale,
'localization_tags': list(revision.localization_tags
.names()),
'review_tags': list(revision.review_tags.names()),
'summary': summary,
'tags': list(translation.tags.names()),
'title': translation.title,
'url': translation.get_absolute_url(),
'uuid': str(translation.uuid)
})
if self.current_revision:
review_tags = list(self.current_revision.review_tags.names())
localization_tags = list(self.current_revision
.localization_tags
.names())
last_edit = self.current_revision.created.isoformat()
if self.current_revision.summary:
summary = self.current_revision.summary
else:
summary = self.get_summary(strip_markup=False)
else:
review_tags = []
localization_tags = []
last_edit = ''
summary = ''
if not self.pk:
tags = []
else:
tags = list(self.tags.names())
now_iso = datetime.now().isoformat()
if self.modified:
modified = self.modified.isoformat()
else:
modified = now_iso
return {
'title': self.title,
'label': self.title,
'url': self.get_absolute_url(),
'id': self.id,
'uuid': str(self.uuid),
'slug': self.slug,
'tags': tags,
'review_tags': review_tags,
'localization_tags': localization_tags,
'sections': sections,
'locale': self.locale,
'summary': summary,
'translations': translations,
'modified': modified,
'json_modified': now_iso,
'last_edit': last_edit
}
def get_json_data(self, stale=True):
"""Returns a document in object format for output as JSON.
The stale parameter, when True, accepts stale cached data even after
the document has been modified."""
# Have parsed data & don't care about freshness? Here's a quick out..
curr_json_data = getattr(self, '_json_data', None)
if curr_json_data and stale:
return curr_json_data
# Attempt to parse the current contents of self.json, taking care in
# case it's empty or broken JSON.
self._json_data = {}
if self.json:
try:
self._json_data = json.loads(self.json)
except (TypeError, ValueError):
pass
# Try to get ISO 8601 datestamps for the doc and the json
json_lmod = self._json_data.get('json_modified', '')
doc_lmod = self.modified.isoformat()
# If there's no parsed data or the data is stale & we care, it's time
# to rebuild the cached JSON data.
if (not self._json_data) or (not stale and doc_lmod > json_lmod):
self._json_data = self.build_json_data()
self.json = json.dumps(self._json_data)
Document.objects.filter(pk=self.pk).update(json=self.json)
return self._json_data
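    # Illustrative use: stale=True (the default) returns whatever was last
    # cached in self.json, while stale=False rebuilds the payload whenever
    # self.modified is newer than the stored 'json_modified' stamp:
    #   data = doc.get_json_data(stale=False)
    #   translations = data['translations']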
@cached_property
def extract(self):
return Extractor(self)
def natural_key(self):
return (self.locale, self.slug)
@staticmethod
def natural_key_hash(keys):
natural_key = u'/'.join(keys)
return hashlib.md5(natural_key.encode('utf8')).hexdigest()
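    # Example (values illustrative): natural_key_hash(('en-US', 'Web/CSS'))
    # hashes the joined key u'en-US/Web/CSS' into a stable hex digest that is
    # safe to embed in memcache keys.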
@cached_property
def natural_cache_key(self):
return self.natural_key_hash(self.natural_key())
def _existing(self, attr, value):
"""Return an existing doc (if any) in this locale whose `attr` attr is
equal to mine."""
return Document.objects.filter(locale=self.locale, **{attr: value})
def _raise_if_collides(self, attr, exception):
"""Raise an exception if a page of this title/slug already exists."""
if self.id is None or hasattr(self, 'old_' + attr):
# If I am new or my title/slug changed...
existing = self._existing(attr, getattr(self, attr))
if existing.exists():
raise exception(existing[0])
def clean(self):
"""Translations can't be localizable."""
self._clean_is_localizable()
def _clean_is_localizable(self):
"""is_localizable == allowed to have translations. Make sure that isn't
violated.
For default language (en-US), is_localizable means it can have
translations. Enforce:
* is_localizable=True if it has translations
* if has translations, unable to make is_localizable=False
        For non-default languages, is_localizable must be False.
"""
if self.locale != settings.WIKI_DEFAULT_LANGUAGE:
self.is_localizable = False
# Can't save this translation if parent not localizable
if (self.parent and self.parent.id != self.id and
not self.parent.is_localizable):
raise ValidationError('"%s": parent "%s" is not localizable.' % (
unicode(self), unicode(self.parent)))
# Can't make not localizable if it has translations
# This only applies to documents that already exist, hence self.pk
if self.pk and not self.is_localizable and self.translations.exists():
raise ValidationError('"%s": document has %s translations but is '
'not localizable.' %
(unicode(self), self.translations.count()))
def _attr_for_redirect(self, attr, template):
"""Return the slug or title for a new redirect.
`template` is a Python string template with "old" and "number" tokens
used to create the variant.
"""
def unique_attr():
"""Return a variant of getattr(self, attr) such that there is no
Document of my locale with string attribute `attr` equal to it.
Never returns the original attr value.
"""
# "My God, it's full of race conditions!"
i = 1
while True:
new_value = template % dict(old=getattr(self, attr), number=i)
if not self._existing(attr, new_value).exists():
return new_value
i += 1
old_attr = 'old_' + attr
if hasattr(self, old_attr):
# My slug (or title) is changing; we can reuse it for the redirect.
return getattr(self, old_attr)
else:
# Come up with a unique slug (or title):
return unique_attr()
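    # Illustrative call (this template literal is an assumption, not the
    # exact one callers use):
    #   self._attr_for_redirect('slug', '%(old)s_%(number)s')
    # returns e.g. u'My_Slug_1', the first variant that doesn't collide with
    # an existing document in this locale.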
def revert(self, revision, user, comment=None):
"""
Reverts the given revision by creating a new one.
- Sets its comment to the given value and points the new revision
to the old revision
- Keeps review tags
- Make new revision the current one of the document
"""
# remember the current revision's primary key for later
old_revision_pk = revision.pk
# get a list of review tag names for later
old_review_tags = list(revision.review_tags.names())
with transaction.atomic():
# reset primary key
revision.pk = None
# add a sensible comment
revision.comment = ("Revert to revision of %s by %s" %
(revision.created, revision.creator))
if comment:
revision.comment = u'%s: "%s"' % (revision.comment, comment)
revision.created = datetime.now()
revision.creator = user
if revision.document.original.pk == self.pk:
revision.based_on_id = old_revision_pk
revision.save()
# set review tags
if old_review_tags:
revision.review_tags.set(*old_review_tags)
# populate model instance with fresh data from database
revision.refresh_from_db()
# make this new revision the current one for the document
revision.make_current()
return revision
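    # Usage sketch (names assumed): reverting creates a brand-new current
    # revision which, for default-locale documents, records the reverted
    # revision in based_on:
    #   old_rev = doc.revisions.order_by('created').first()
    #   new_rev = doc.revert(old_rev, request.user, comment='undo vandalism')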
def revise(self, user, data, section_id=None):
"""Given a dict of changes to make, build and save a new Revision to
revise this document"""
curr_rev = self.current_revision
new_rev = Revision(creator=user, document=self, content=self.html)
for n in ('title', 'slug', 'render_max_age'):
setattr(new_rev, n, getattr(self, n))
if curr_rev:
new_rev.toc_depth = curr_rev.toc_depth
original_doc = curr_rev.document.original
if original_doc == self:
new_rev.based_on = curr_rev
else:
new_rev.based_on = original_doc.current_revision
# Accept optional field edits...
new_title = data.get('title', False)
        new_rev.title = new_title or self.title
new_tags = data.get('tags', False)
        new_rev.tags = new_tags or edit_string_for_tags(self.tags.all())
new_review_tags = data.get('review_tags', False)
if new_review_tags:
review_tags = new_review_tags
elif curr_rev:
review_tags = edit_string_for_tags(curr_rev.review_tags.all())
else:
review_tags = ''
new_rev.summary = data.get('summary', '')
# To add comment, when Technical/Editorial review completed
new_rev.comment = data.get('comment', '')
# Accept HTML edits, optionally by section
new_html = data.get('content', data.get('html', False))
if new_html:
if not section_id:
new_rev.content = new_html
else:
content = parse_content(self.html)
new_rev.content = (content.replaceSection(section_id, new_html)
.serialize())
# Finally, commit the revision changes and return the new rev.
new_rev.save()
new_rev.review_tags.set(*parse_tags(review_tags))
return new_rev
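    # A minimal sketch of the `data` dict this method accepts; the keys
    # mirror the lookups above, the values are illustrative:
    #   doc.revise(request.user,
    #              {'content': '<p>Updated</p>',
    #               'comment': 'Fix typo',
    #               'review_tags': 'editorial'})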
@cached_property
def last_modified_cache_key(self):
return DOCUMENT_LAST_MODIFIED_CACHE_KEY_TMPL % self.natural_cache_key
def fill_last_modified_cache(self):
"""
Convert python datetime to Unix epoch seconds. This is more
easily digested by the cache, and is more compatible with other
        services that might spy on Kuma's cache entries (e.g. KumaScript)
"""
modified_epoch = self.modified.strftime('%s')
memcache.set(self.last_modified_cache_key, modified_epoch)
return modified_epoch
def save(self, *args, **kwargs):
self.is_template = self.slug.startswith(TEMPLATE_TITLE_PREFIX)
self.is_redirect = bool(self.get_redirect_url())
try:
# Check if the slug would collide with an existing doc
self._raise_if_collides('slug', SlugCollision)
except UniqueCollision as err:
if err.existing.get_redirect_url() is not None:
# If the existing doc is a redirect, delete it and clobber it.
err.existing.delete()
else:
raise err
# These are too important to leave to a (possibly omitted) is_valid
# call:
self._clean_is_localizable()
if not self.parent_topic and self.parent:
# If this is a translation without a topic parent, try to get one.
self.acquire_translated_topic_parent()
super(Document, self).save(*args, **kwargs)
        # Refresh the cached last-modified timestamp.
self.fill_last_modified_cache()
def delete(self, *args, **kwargs):
if waffle.switch_is_active('wiki_error_on_delete'):
# bug 863692: Temporary while we investigate disappearing pages.
raise Exception("Attempt to delete document %s: %s" %
(self.id, self.title))
else:
if self.is_redirect or 'purge' in kwargs:
if 'purge' in kwargs:
kwargs.pop('purge')
return super(Document, self).delete(*args, **kwargs)
signals.pre_delete.send(sender=self.__class__,
instance=self)
if not self.deleted:
Document.objects.filter(pk=self.pk).update(deleted=True)
memcache.delete(self.last_modified_cache_key)
signals.post_delete.send(sender=self.__class__, instance=self)
def purge(self):
if waffle.switch_is_active('wiki_error_on_delete'):
# bug 863692: Temporary while we investigate disappearing pages.
raise Exception("Attempt to purge document %s: %s" %
(self.id, self.title))
else:
if not self.deleted:
                raise Exception("Attempt to purge non-deleted document %s: %s" %
(self.id, self.title))
self.delete(purge=True)
def restore(self):
"""
Restores a logically deleted document by reverting the deleted
boolean to False. Sends pre_save and post_save Django signals to
        follow duck-typing best practices.
"""
if not self.deleted:
raise Exception("Document is not deleted, cannot be restored.")
signals.pre_save.send(sender=self.__class__, instance=self)
Document.deleted_objects.filter(pk=self.pk).update(deleted=False)
signals.post_save.send(sender=self.__class__, instance=self)
def _post_move_redirects(self, new_slug, user, title):
"""
Create and return a Document and a Revision to serve as
redirects once this page has been moved.
"""
redirect_doc = Document(locale=self.locale,
title=self.title,
slug=self.slug,
is_localizable=False)
content = REDIRECT_CONTENT % {
'href': reverse('wiki.document',
args=[new_slug],
locale=self.locale),
'title': title,
}
redirect_rev = Revision(content=content,
is_approved=True,
toc_depth=self.current_revision.toc_depth,
creator=user)
return redirect_doc, redirect_rev
def _moved_revision(self, new_slug, user, title=None):
"""
Create and return a Revision which is a copy of this
Document's current Revision, as it will exist at a moved
location.
"""
moved_rev = self.current_revision
# Shortcut trick for getting an object with all the same
# values, but making Django think it's new.
moved_rev.id = None
moved_rev.creator = user
moved_rev.created = datetime.now()
moved_rev.slug = new_slug
if title:
moved_rev.title = title
return moved_rev
def _get_new_parent(self, new_slug):
"""
Get this moved Document's parent doc if a Document
exists at the appropriate slug and locale.
"""
return valid_slug_parent(new_slug, self.locale)
def _move_conflicts(self, new_slug):
"""
Given a new slug to be assigned to this document, check
whether there is an existing, non-redirect, Document at that
slug in this locale. Any redirect existing there will be
deleted.
This is necessary since page moving is a background task, and
a Document may come into existence at the target slug after
the move is requested.
"""
existing = None
try:
existing = Document.objects.get(locale=self.locale,
slug=new_slug)
except Document.DoesNotExist:
pass
if existing is not None:
if existing.is_redirect:
existing.delete()
else:
raise Exception("Requested move would overwrite a non-redirect page.")
def _tree_conflicts(self, new_slug):
"""
Given a new slug to be assigned to this document, return a
list of documents (if any) which would be overwritten by
moving this document or any of its children in that fashion.
"""
conflicts = []
try:
existing = Document.objects.get(locale=self.locale, slug=new_slug)
if not existing.is_redirect:
conflicts.append(existing)
except Document.DoesNotExist:
pass
for child in self.get_descendants():
child_title = child.slug.split('/')[-1]
try:
slug = '/'.join([new_slug, child_title])
existing = Document.objects.get(locale=self.locale, slug=slug)
if not existing.get_redirect_url():
conflicts.append(existing)
except Document.DoesNotExist:
pass
return conflicts
def _move_tree(self, new_slug, user=None, title=None):
"""
Move this page and all its children.
"""
# Page move is a 10-step process.
#
# Step 1: Sanity check. Has a page been created at this slug
# since the move was requested? If not, OK to go ahead and
# change our slug.
self._move_conflicts(new_slug)
if user is None:
user = self.current_revision.creator
if title is None:
title = self.title
# Step 2: stash our current review tags, since we want to
# preserve them.
review_tags = list(self.current_revision.review_tags.names())
# Step 3: Create (but don't yet save) a Document and Revision
# to leave behind as a redirect from old location to new.
redirect_doc, redirect_rev = self._post_move_redirects(new_slug,
user,
title)
# Step 4: Update our breadcrumbs.
new_parent = self._get_new_parent(new_slug)
# If we found a Document at what will be our parent slug, set
# it as our parent_topic. If we didn't find one, then we no
# longer have a parent_topic (since our original parent_topic
# would already have moved if it were going to).
self.parent_topic = new_parent
# Step 5: Save this Document.
self.slug = new_slug
self.save()
# Step 6: Create (but don't yet save) a copy of our current
# revision, but with the new slug and title (if title is
# changing too).
moved_rev = self._moved_revision(new_slug, user, title)
# Step 7: Save the Revision that actually moves us.
moved_rev.save(force_insert=True)
# Step 8: Save the review tags.
moved_rev.review_tags.set(*review_tags)
# Step 9: Save the redirect.
redirect_doc.save()
redirect_rev.document = redirect_doc
redirect_rev.save()
# Finally, step 10: recurse through all of our children.
for child in self.children.filter(locale=self.locale):
# Save the original slug and locale so we can use them in
# the error message if something goes wrong.
old_child_slug, old_child_locale = child.slug, child.locale
child_title = child.slug.split('/')[-1]
try:
child._move_tree('/'.join([new_slug, child_title]), user)
except PageMoveError:
# A child move already caught this and created the
# correct exception + error message, so just propagate
# it up.
raise
except Exception as e:
# One of the immediate children of this page failed to
# move.
exc_class, exc_message, exc_tb = sys.exc_info()
message = """
Failure occurred while attempting to move document
with id %(doc_id)s.
That document can be viewed at:
https://developer.mozilla.org/%(locale)s/docs/%(slug)s
The exception raised was:
Exception type: %(exc_class)s
Exception message: %(exc_message)s
Full traceback:
%(traceback)s
""" % {'doc_id': child.id,
'locale': old_child_locale,
'slug': old_child_slug,
'exc_class': exc_class,
'exc_message': exc_message,
'traceback': traceback.format_exc(e)}
raise PageMoveError(message)
def repair_breadcrumbs(self):
"""
Temporary method while we work out the real issue behind
translation/breadcrumb mismatches (bug 900961).
Basically just walks up the tree of topical parents, calling
acquire_translated_topic_parent() for as long as there's a
language mismatch.
"""
if (not self.parent_topic or
self.parent_topic.locale != self.locale):
self.acquire_translated_topic_parent()
if self.parent_topic:
self.parent_topic.repair_breadcrumbs()
def acquire_translated_topic_parent(self):
"""
This normalizes topic breadcrumb paths between locales.
Attempt to acquire a topic parent from a translation of our translation
parent's topic parent, auto-creating a stub document if necessary.
"""
if not self.parent:
# Bail, if this is not in fact a translation.
return
parent_topic = self.parent.parent_topic
if not parent_topic:
# Bail, if the translation parent has no topic parent
return
try:
# Look for an existing translation of the topic parent
new_parent = parent_topic.translations.get(locale=self.locale)
except Document.DoesNotExist:
try:
# No luck. As a longshot, let's try looking for the same slug.
new_parent = Document.objects.get(locale=self.locale,
slug=parent_topic.slug)
if not new_parent.parent:
# HACK: This same-slug/different-locale doc should probably
# be considered a translation. Let's correct that on the
# spot.
new_parent.parent = parent_topic
new_parent.save()
except Document.DoesNotExist:
# Finally, let's create a translated stub for a topic parent
new_parent = Document.objects.get(pk=parent_topic.pk)
new_parent.pk = None
new_parent.current_revision = None
new_parent.parent_topic = None
new_parent.parent = parent_topic
new_parent.locale = self.locale
new_parent.save()
if parent_topic.current_revision:
# Don't forget to clone a current revision
new_rev = Revision.objects.get(pk=parent_topic.current_revision.pk)
new_rev.pk = None
new_rev.document = new_parent
# HACK: Let's auto-add tags that flag this as a topic stub
stub_tags = '"TopicStub","NeedsTranslation"'
stub_l10n_tags = ['inprogress']
if new_rev.tags:
new_rev.tags = '%s,%s' % (new_rev.tags, stub_tags)
else:
new_rev.tags = stub_tags
new_rev.save()
new_rev.localization_tags.add(*stub_l10n_tags)
# Finally, assign the new default parent topic
self.parent_topic = new_parent
self.save()
@property
def content_parsed(self):
if not self.current_revision:
return None
return self.current_revision.content_parsed
def populate_attachments(self):
"""
File attachments are stored at the DB level and synced here
with the document's HTML content.
We find them by regex-searching over the HTML for URLs that match the
file URL patterns.
"""
mt_files = DEKI_FILE_URL.findall(self.html)
kuma_files = KUMA_FILE_URL.findall(self.html)
params = None
if mt_files:
# We have at least some MindTouch files.
params = models.Q(mindtouch_attachment_id__in=mt_files)
if kuma_files:
# We also have some kuma files. Use an OR query.
params = params | models.Q(id__in=kuma_files)
if kuma_files and not params:
# We have only kuma files.
params = models.Q(id__in=kuma_files)
Attachment = apps.get_model('attachments', 'Attachment')
if params:
found_attachments = Attachment.objects.filter(params)
else:
# If no files found, return an empty Attachment queryset.
found_attachments = Attachment.objects.none()
# Delete all document-attachments-relations for attachments that
# weren't originally uploaded for the document to populate the list
# again below
self.attached_files.filter(is_original=False).delete()
# Reset the linked status for all attachments that are left
self.attached_files.all().update(is_linked=False)
        # Go through the attachments discovered in the HTML and
        # create linked attachments.
        # Three possible states for an attachment relation:
        # - linked in the document, but not originally uploaded
        # - linked in the document and originally uploaded
        # - not linked in the document, but originally uploaded
populated = []
for attachment in (found_attachments.only('pk', 'current_revision')
.iterator()):
revision = attachment.current_revision
relation, created = self.files.through.objects.update_or_create(
file_id=attachment.pk,
document_id=self.pk,
defaults={
'attached_by': revision.creator,
'name': revision.filename,
'is_linked': True,
},
)
populated.append((relation, created))
return populated
@property
def show_toc(self):
return self.current_revision and self.current_revision.toc_depth
@cached_property
def language(self):
return get_language_mapping()[self.locale.lower()]
def get_absolute_url(self, endpoint='wiki.document'):
"""
Build the absolute URL to this document from its full path
"""
return reverse(endpoint, locale=self.locale, args=[self.slug])
def get_edit_url(self):
return self.get_absolute_url(endpoint='wiki.edit')
def get_redirect_url(self):
"""
If I am a redirect, return the absolute URL to which I redirect.
Otherwise, return None.
"""
# If a document starts with REDIRECT_HTML and contains any <a> tags
# with hrefs, return the href of the first one. This trick saves us
# from having to parse the HTML every time.
if REDIRECT_HTML in self.html:
anchors = PyQuery(self.html)('a[href].redirect')
if anchors:
url = anchors[0].get('href')
# allow explicit domain and *not* '//'
                # i.e. allow "https://developer...." and "/en-US/docs/blah"
if len(url) > 1:
if url.startswith(settings.SITE_URL):
return url
elif url[0] == '/' and url[1] != '/':
return url
elif len(url) == 1 and url[0] == '/':
return url
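    # For illustration (markup assumed, shaped like REDIRECT_CONTENT above):
    # a body containing
    #   REDIRECT <a class="redirect" href="/en-US/docs/New/Slug">New Slug</a>
    # would make this method return u'/en-US/docs/New/Slug'.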
def get_topic_parents(self):
"""Build a list of parent topics from self to root"""
curr, parents = self, []
while curr.parent_topic:
curr = curr.parent_topic
parents.append(curr)
return parents
def allows_revision_by(self, user):
"""
Return whether `user` is allowed to create new revisions of me.
The motivation behind this method is that templates and other types of
docs may have different permissions.
"""
if (self.slug.startswith(TEMPLATE_TITLE_PREFIX) and
not user.has_perm('wiki.change_template_document')):
return False
return True
def allows_editing_by(self, user):
"""
Return whether `user` is allowed to edit document-level metadata.
If the Document doesn't have a current_revision (nothing approved) then
all the Document fields are still editable. Once there is an approved
Revision, the Document fields can only be edited by privileged users.
"""
if (self.slug.startswith(TEMPLATE_TITLE_PREFIX) and
not user.has_perm('wiki.change_template_document')):
return False
return (not self.current_revision or
user.has_perm('wiki.change_document'))
def translated_to(self, locale):
"""
Return the translation of me to the given locale.
If there is no such Document, return None.
"""
if self.locale != settings.WIKI_DEFAULT_LANGUAGE:
            raise NotImplementedError('translated_to() is implemented only '
                                      'on Documents in the default language '
                                      'so far.')
try:
return Document.objects.get(locale=locale, parent=self)
except Document.DoesNotExist:
return None
@property
def original(self):
"""
Return the document I was translated from or, if none, myself.
"""
return self.parent or self
@cached_property
def other_translations(self):
"""
Return a list of Documents - other translations of this Document
"""
if self.parent is None:
return self.translations.all().order_by('locale')
else:
translations = (self.parent.translations.all()
.exclude(id=self.id)
.order_by('locale'))
pks = list(translations.values_list('pk', flat=True))
return Document.objects.filter(pk__in=[self.parent.pk] + pks)
@property
def parents(self):
"""
Return the list of topical parent documents above this one,
or an empty list if none exist.
"""
if self.parent_topic is None:
return []
current_parent = self.parent_topic
parents = [current_parent]
while current_parent.parent_topic is not None:
parents.insert(0, current_parent.parent_topic)
current_parent = current_parent.parent_topic
return parents
def is_child_of(self, other):
"""
Circular dependency detection -- if someone tries to set
this as a parent of a document it's a child of, they're gonna
have a bad time.
"""
return other.id in (d.id for d in self.parents)
# This is a method, not a property, because it can do a lot of DB
# queries and so should look scarier. It's not just named
# 'children' because that's taken already by the reverse relation
# on parent_topic.
def get_descendants(self, limit=None, levels=0):
"""
Return a list of all documents which are children
(grandchildren, great-grandchildren, etc.) of this one.
"""
results = []
if (limit is None or levels < limit) and self.children.exists():
for child in self.children.all().filter(locale=self.locale):
results.append(child)
                results.extend(child.get_descendants(limit, levels + 1))
return results
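    # Usage sketch: get_descendants() walks the entire same-locale subtree,
    # while get_descendants(limit=1) stops after the direct children:
    #   immediate_children = doc.get_descendants(limit=1)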
def is_watched_by(self, user):
"""
Return whether `user` is notified of edits to me.
"""
from .events import EditDocumentEvent
return EditDocumentEvent.is_notifying(user, self)
def tree_is_watched_by(self, user):
"""Return whether `user` is notified of edits to me AND sub-pages."""
from .events import EditDocumentInTreeEvent
return EditDocumentInTreeEvent.is_notifying(user, self)
def parent_trees_watched_by(self, user):
"""
Return any and all of this document's parents that are watched by the
given user.
"""
return [doc for doc in self.parents if doc.tree_is_watched_by(user)]
@cached_property
def contributors(self):
return DocumentContributorsJob().get(self.pk)
@cached_property
def zone_stack(self):
return DocumentZoneStackJob().get(self.pk)
def get_full_url(self):
return absolutify(self.get_absolute_url())
class DocumentDeletionLog(models.Model):
"""
Log of who deleted a Document, when, and why.
"""
# We store the locale/slug because it's unique, and also because a
# ForeignKey would delete this log when the Document gets purged.
locale = models.CharField(
max_length=7,
choices=settings.LANGUAGES,
default=settings.WIKI_DEFAULT_LANGUAGE,
db_index=True,
)
slug = models.CharField(max_length=255, db_index=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
timestamp = models.DateTimeField(auto_now=True)
reason = models.TextField()
def __unicode__(self):
return "/%(locale)s/%(slug)s deleted by %(user)s" % {
'locale': self.locale,
'slug': self.slug,
'user': self.user
}
class DocumentZone(models.Model):
"""
Model object declaring a content zone root at a given Document, provides
attributes inherited by the topic hierarchy beneath it.
"""
document = models.OneToOneField(Document, related_name='zone')
styles = models.TextField(null=True, blank=True)
url_root = models.CharField(
max_length=255, null=True, blank=True, db_index=True,
help_text="alternative URL path root for documents under this zone")
def __unicode__(self):
return u'DocumentZone %s (%s)' % (self.document.get_absolute_url(),
self.document.title)
class ReviewTag(TagBase):
"""A tag indicating review status, mainly for revisions"""
class Meta:
verbose_name = _('Review Tag')
verbose_name_plural = _('Review Tags')
class LocalizationTag(TagBase):
"""A tag indicating localization status, mainly for revisions"""
class Meta:
verbose_name = _('Localization Tag')
verbose_name_plural = _('Localization Tags')
class ReviewTaggedRevision(ItemBase):
"""Through model, just for review tags on revisions"""
content_object = models.ForeignKey('Revision')
tag = models.ForeignKey(ReviewTag, related_name="%(app_label)s_%(class)s_items")
@classmethod
def tags_for(cls, *args, **kwargs):
return tags_for(cls, *args, **kwargs)
class LocalizationTaggedRevision(ItemBase):
"""Through model, just for localization tags on revisions"""
content_object = models.ForeignKey('Revision')
tag = models.ForeignKey(LocalizationTag, related_name="%(app_label)s_%(class)s_items")
@classmethod
def tags_for(cls, *args, **kwargs):
return tags_for(cls, *args, **kwargs)
class Revision(models.Model):
"""A revision of a localized knowledgebase document"""
# Depth of table-of-contents in document display.
TOC_DEPTH_NONE = 0
TOC_DEPTH_ALL = 1
TOC_DEPTH_H2 = 2
TOC_DEPTH_H3 = 3
TOC_DEPTH_H4 = 4
TOC_DEPTH_CHOICES = (
(TOC_DEPTH_NONE, _(u'No table of contents')),
(TOC_DEPTH_ALL, _(u'All levels')),
(TOC_DEPTH_H2, _(u'H2 and higher')),
(TOC_DEPTH_H3, _(u'H3 and higher')),
(TOC_DEPTH_H4, _('H4 and higher')),
)
document = models.ForeignKey(Document, related_name='revisions')
# Title and slug in document are primary, but they're kept here for
# revision history.
title = models.CharField(max_length=255, null=True, db_index=True)
slug = models.CharField(max_length=255, null=True, db_index=True)
summary = models.TextField() # wiki markup
content = models.TextField() # wiki markup
tidied_content = models.TextField(blank=True) # wiki markup tidied up
# Keywords are used mostly to affect search rankings. Moderators may not
# have the language expertise to translate keywords, so we put them in the
# Revision so the translators can handle them:
keywords = models.CharField(max_length=255, blank=True)
# Tags are stored in a Revision as a plain CharField, because Revisions are
# not indexed by tags. This data is retained for history tracking.
tags = models.CharField(max_length=255, blank=True)
# Tags are (ab)used as status flags and for searches, but the through model
# should constrain things from getting expensive.
review_tags = TaggableManager(through=ReviewTaggedRevision)
localization_tags = TaggableManager(through=LocalizationTaggedRevision)
toc_depth = models.IntegerField(choices=TOC_DEPTH_CHOICES,
default=TOC_DEPTH_ALL)
# Maximum age (in seconds) before this document needs re-rendering
render_max_age = models.IntegerField(blank=True, null=True)
created = models.DateTimeField(default=datetime.now, db_index=True)
comment = models.CharField(max_length=255)
creator = models.ForeignKey(settings.AUTH_USER_MODEL,
related_name='created_revisions')
is_approved = models.BooleanField(default=True, db_index=True)
# The default locale's rev that was current when the Edit button was hit to
# create this revision. Used to determine whether localizations are out of
# date.
based_on = models.ForeignKey('self', null=True, blank=True)
# TODO: limit_choices_to={'document__locale':
# settings.WIKI_DEFAULT_LANGUAGE} is a start but not sufficient.
is_mindtouch_migration = models.BooleanField(default=False, db_index=True,
help_text="Did this revision come from MindTouch?")
objects = TransformManager()
def get_absolute_url(self):
"""Build the absolute URL to this revision"""
return reverse('wiki.revision',
locale=self.document.locale,
args=[self.document.slug, self.pk])
def _based_on_is_clean(self):
"""Return a tuple: (the correct value of based_on, whether the old
value was correct).
based_on must be an approved revision of the English version of the
document if there are any such revisions, any revision if no
approved revision exists, and None otherwise. If based_on is not
already set when this is called, the return value defaults to the
current_revision of the English document.
"""
# TODO(james): This could probably be simplified down to "if
# based_on is set, it must be a revision of the original document."
original = self.document.original
base = original.current_or_latest_revision()
has_approved = original.revisions.filter(is_approved=True).exists()
if (original.current_revision or not has_approved):
if (self.based_on and self.based_on.document != original):
# based_on is set and points to the wrong doc.
return base, False
# Else based_on is valid; leave it alone.
elif self.based_on:
return None, False
return self.based_on, True
def clean(self):
"""Ensure based_on is valid."""
# All of the cleaning herein should be unnecessary unless the user
# messes with hidden form data.
try:<|fim▁hole|> # For clean()ing forms that don't have a document instance behind
# them yet
self.based_on = None
else:
based_on, is_clean = self._based_on_is_clean()
if not is_clean:
if self.document.parent:
# Restoring translation source, so base on current_revision
self.based_on = self.document.parent.current_revision
else:
old = self.based_on
self.based_on = based_on # Guess a correct value.
locale = settings.LOCALES[settings.WIKI_DEFAULT_LANGUAGE].native
error = ugettext(
'A revision must be based on a revision of the '
'%(locale)s document. Revision ID %(id)s does '
'not fit those criteria.')
raise ValidationError(error %
{'locale': locale, 'id': old.id})
def save(self, *args, **kwargs):
_, is_clean = self._based_on_is_clean()
if not is_clean: # No more Mister Nice Guy
# TODO(erik): This error message ignores non-translations.
raise ProgrammingError('Revision.based_on must be None or refer '
'to a revision of the default-'
'language document. It was %s' %
self.based_on)
if not self.title:
self.title = self.document.title
if not self.slug:
self.slug = self.document.slug
super(Revision, self).save(*args, **kwargs)
# When a revision is approved, update document metadata and re-cache
# the document's html content
if self.is_approved:
self.make_current()
def make_current(self):
"""
Make this revision the current one for the document
"""
self.document.title = self.title
self.document.slug = self.slug
self.document.html = self.content_cleaned
self.document.render_max_age = self.render_max_age
self.document.current_revision = self
# Since Revision stores tags as a string, we need to parse them first
# before setting on the Document.
self.document.tags.set(*parse_tags(self.tags))
self.document.save()
# Re-create all document-attachment relations since they are based
# on the actual HTML content
self.document.populate_attachments()
def __unicode__(self):
return u'[%s] %s #%s' % (self.document.locale,
self.document.title,
self.id)
def get_section_content(self, section_id):
"""Convenience method to extract the content for a single section"""
return self.document.extract.section(self.content, section_id)
def get_tidied_content(self, allow_none=False):
"""
Return the revision content parsed and cleaned by tidy.
First, check in denormalized db field. If it's not available, schedule
an asynchronous task to store it.
allow_none -- To prevent CPU-hogging calls, return None instead of
calling tidy_content in-process.
"""
# we may be lucky and have the tidied content already denormalized
# in the database, if so return it
if self.tidied_content:
tidied_content = self.tidied_content
else:
if allow_none:
if self.pk:
from .tasks import tidy_revision_content
tidy_revision_content.delay(self.pk, refresh=False)
tidied_content = None
else:
tidied_content, errors = tidy_content(self.content)
if self.pk:
Revision.objects.filter(pk=self.pk).update(
tidied_content=tidied_content)
self.tidied_content = tidied_content or ''
return tidied_content
@property
def content_cleaned(self):
if self.document.is_template:
return self.content
else:
return Document.objects.clean_content(self.content)
@cached_property
def previous(self):
return self.get_previous()
def get_previous(self):
"""
Returns the previous approved revision or None.
"""
try:
return self.document.revisions.filter(
is_approved=True,
created__lt=self.created,
).order_by('-created')[0]
except IndexError:
return None
@cached_property
def needs_editorial_review(self):
return self.review_tags.filter(name='editorial').exists()
@cached_property
def needs_technical_review(self):
return self.review_tags.filter(name='technical').exists()
@cached_property
def localization_in_progress(self):
return self.localization_tags.filter(name='inprogress').exists()
@property
def translation_age(self):
return abs((datetime.now() - self.created).days)
class RevisionIP(models.Model):
"""
IP Address for a Revision including User-Agent string and Referrer URL.
"""
revision = models.ForeignKey(
Revision
)
ip = models.CharField(
_('IP address'),
max_length=40,
editable=False,
db_index=True,
blank=True,
null=True,
)
user_agent = models.TextField(
_('User-Agent'),
editable=False,
blank=True,
)
referrer = models.TextField(
_('HTTP Referrer'),
editable=False,
blank=True,
)
data = models.TextField(
editable=False,
blank=True,
null=True,
verbose_name=_('Data submitted to Akismet')
)
objects = RevisionIPManager()
def __unicode__(self):
return '%s (revision %d)' % (self.ip or 'No IP', self.revision.id)
class RevisionAkismetSubmission(AkismetSubmission):
"""
The Akismet submission per wiki document revision.
Stores only a reference to the submitted revision.
"""
revision = models.ForeignKey(
Revision,
related_name='akismet_submissions',
null=True,
blank=True,
verbose_name=_('Revision'),
# don't delete the akismet submission but set the revision to null
on_delete=models.SET_NULL,
)
class Meta:
verbose_name = _('Akismet submission')
verbose_name_plural = _('Akismet submissions')
def __unicode__(self):
if self.revision:
return (
u'%(type)s submission by %(sender)s (Revision %(revision_id)d)' % {
'type': self.get_type_display(),
'sender': self.sender,
'revision_id': self.revision.id,
}
)
else:
return (
u'%(type)s submission by %(sender)s (no revision)' % {
'type': self.get_type_display(),
'sender': self.sender,
}
)
class EditorToolbar(models.Model):
creator = models.ForeignKey(settings.AUTH_USER_MODEL,
related_name='created_toolbars')
default = models.BooleanField(default=False)
name = models.CharField(max_length=100)
code = models.TextField(max_length=2000)
def __unicode__(self):
return self.name
class DocumentSpamAttempt(SpamAttempt):
"""
The wiki document specific spam attempt.
    Stores title, slug and locale of the document revision to be able
to see where it happens. Stores data sent to Akismet so that staff can
review Akismet's spam detection for false positives.
"""
title = models.CharField(
verbose_name=_('Title'),
max_length=255,
)
slug = models.CharField(
verbose_name=_('Slug'),
max_length=255,
)
document = models.ForeignKey(
Document,
related_name='spam_attempts',
null=True,
blank=True,
verbose_name=_('Document (optional)'),
on_delete=models.SET_NULL,
)
data = models.TextField(
editable=False,
blank=True,
null=True,
verbose_name=_('Data submitted to Akismet')
)
reviewed = models.DateTimeField(
_('reviewed'),
blank=True,
null=True,
)
NEEDS_REVIEW = 0
HAM = 1
SPAM = 2
REVIEW_UNAVAILABLE = 3
AKISMET_ERROR = 4
REVIEW_CHOICES = (
(NEEDS_REVIEW, _('Needs Review')),
(HAM, _('Ham / False Positive')),
(SPAM, _('Confirmed as Spam')),
(REVIEW_UNAVAILABLE, _('Review Unavailable')),
(AKISMET_ERROR, _('Akismet Error')),
)
review = models.IntegerField(
choices=REVIEW_CHOICES,
default=NEEDS_REVIEW,
verbose_name=_("Review of Akismet's classification as spam"),
)
reviewer = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='documentspam_reviewed',
blank=True,
null=True,
verbose_name=_('Staff reviewer'),
)
def __unicode__(self):
return u'%s (%s)' % (self.slug, self.title)<|fim▁end|> | self.document and self.document.original
except Document.DoesNotExist: |
<|file_name|>transition.js<|end_file_name|><|fim▁begin|>import Ember from "ember";
export default Ember.Object.extend({
targetRoute: null,
params: null,
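  // Rough heuristic sketch: a route is a single resource when its name is
  // neither plural (no word ending in "s") nor "index" -- e.g. "post"
  // matches, while "posts" and "index" do not.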
isSingleResource: Ember.computed.match(
'targetRoute',<|fim▁hole|> hasParams: Ember.computed.bool('params')
});<|fim▁end|> | /(?:^(?!.*s\b))(?:^(?!index)).*$/
), |
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>// This file is part of libfringe, a low-level green threading library.
// Copyright (c) Nathan Zadoks <[email protected]>,
// whitequark <[email protected]>
// Amanieu d'Antras <[email protected]>
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
// To understand the machine code in this file, keep in mind these facts:
// * i686 SysV C ABI requires the stack to be aligned at function entry,
// so that `%esp+4` is a multiple of 16. Aligned operands are a requirement
// of SIMD instructions, and making this the responsibility of the caller
// avoids having to maintain a frame pointer, which is necessary when
// a function has to realign the stack from an unknown state.
// * i686 SysV C ABI passes the first argument on the stack. This is
// unfortunate, because unlike every other architecture we can't reuse
// `swap` for the initial call, and so we use a trampoline.
//
// To understand the DWARF CFI code in this file, keep in mind these facts:
// * CFI is "call frame information"; a set of instructions to a debugger or
// an unwinder that allow it to simulate returning from functions. This implies
// restoring every register to its pre-call state, as well as the stack pointer.
// * CFA is "call frame address"; the value of stack pointer right before the call
// instruction in the caller. Everything strictly below CFA (and inclusive until
// the next CFA) is the call frame of the callee. This implies that the return
// address is the part of callee's call frame.
// * Logically, DWARF CFI is a table where rows are instruction pointer values and
// columns describe where registers are spilled (mostly using expressions that
// compute a memory location as CFA+n). A .cfi_offset pseudoinstruction changes
// the state of a column for all IP numerically larger than the one it's placed
// after. A .cfi_def_* pseudoinstruction changes the CFA value similarly.
// * Simulating return is as easy as restoring register values from the CFI table
// and then setting stack pointer to CFA.
//
// A high-level overview of the function of the trampolines when unwinding is:
// * The 2nd init trampoline puts a controlled value (written in swap to `new_cfa`)
// into %ebp. This is then used as the CFA for the 1st trampoline.
// * This controlled value points to the bottom of the stack of the parent context,
// which holds the saved %ebp and return address from the call to swap().
// * The 1st init trampoline tells the unwinder to restore %ebp and its return
// address from the stack frame at %ebp (in the parent stack), thus continuing
// unwinding at the swap call site instead of falling off the end of context stack.
use core::mem;
use stack::Stack;
use stack_pointer::StackPointer;
pub const STACK_ALIGNMENT: usize = 16;
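// For orientation, a sketch of the stack that `init` below constructs
// (stack base at the top; slot labels follow the comments in `init`):
//
//   | padding x3            |  keeps the stack 16-byte aligned
//   | f                     |  function pointer called by trampoline_2
//   | trampoline_1 + 2      |  "return address" placed past the two nops
//   | CFA slot (0xdead0cfa) |  rewritten by swap() on every context switch
//   | trampoline_2 + 1      |  entry point popped and jumped to by swap
//   | parent frame pointer  |  restored into %ebp by swap's trampoline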
pub unsafe fn init(sp: &mut StackPointer,
f: unsafe extern "C" fn(usize, StackPointer) -> !) {
#[cfg(not(target_vendor = "apple"))]
#[naked]
unsafe extern "C" fn trampoline_1() {
asm!(
r#"
# gdb has a hardcoded check that rejects backtraces where frame addresses
# do not monotonically decrease. It is turned off if the function is called
# "__morestack" and that is hardcoded. So, to make gdb backtraces match
# the actual unwinder behavior, we call ourselves "__morestack" and mark
# the symbol as local; it shouldn't interfere with anything.
__morestack:
.local __morestack
# Set up the first part of our DWARF CFI linking stacks together. When
# we reach this function from unwinding, %ebp will be pointing at the bottom
# of the parent linked stack. This link is set each time swap() is called.
# When unwinding the frame corresponding to this function, a DWARF unwinder
# will use %ebp+8 as the next call frame address, restore return address
# from CFA-4 and restore %ebp from CFA-8. This mirrors what the second half
# of `swap_trampoline` does.
.cfi_def_cfa %ebp, 8
.cfi_offset %ebp, -8
# This nop is here so that the initial swap doesn't return to the start
# of the trampoline, which confuses the unwinder since it will look for
# frame information in the previous symbol rather than this one. It is
# never actually executed.
nop
# Stack unwinding in some versions of libunwind doesn't seem to like
# 1-byte symbols, so we add a second nop here. This instruction isn't
# executed either, it is only here to pad the symbol size.
nop
.Lend:
.size __morestack, .Lend-__morestack
"#
: : : : "volatile")
}
#[cfg(target_vendor = "apple")]
#[naked]
unsafe extern "C" fn trampoline_1() {
asm!(
r#"
# Identical to the above, except avoids .local/.size that aren't available on Mach-O.
__morestack:
.private_extern __morestack
.cfi_def_cfa %ebp, 8<|fim▁hole|> "#
: : : : "volatile")
}
#[naked]
unsafe extern "C" fn trampoline_2() {
asm!(
r#"
# Set up the second part of our DWARF CFI.
# When unwinding the frame corresponding to this function, a DWARF unwinder
# will restore %ebp (and thus CFA of the first trampoline) from the stack slot.
# This stack slot is updated every time swap() is called to point to the bottom
# of the stack of the context switch just switched from.
.cfi_def_cfa %ebp, 8
.cfi_offset %ebp, -8
# This nop is here so that the return address of the swap trampoline
# doesn't point to the start of the symbol. This confuses gdb's backtraces,
# causing them to think the parent function is trampoline_1 instead of
# trampoline_2.
nop
# Push arguments.
pushl %esi
pushl %edi
# Call the provided function.
calll *16(%esp)
"#
: : : : "volatile")
}
// We set up the stack in a somewhat special way so that to the unwinder it
// looks like trampoline_1 has called trampoline_2, which has in turn called
// swap::trampoline.
//
// There are 2 call frames in this setup, each containing the return address
// followed by the %ebp value for that frame. This setup supports unwinding
// using DWARF CFI as well as the frame pointer-based unwinding used by tools
// such as perf or dtrace.
sp.push(0 as usize); // Padding to ensure the stack is properly aligned
sp.push(0 as usize); // Padding to ensure the stack is properly aligned
sp.push(0 as usize); // Padding to ensure the stack is properly aligned
sp.push(f as usize); // Function that trampoline_2 should call
// Call frame for trampoline_2. The CFA slot is updated by swap::trampoline
// each time a context switch is performed.
sp.push(trampoline_1 as usize + 2); // Return after the 2 nops
sp.push(0xdead0cfa); // CFA slot
// Call frame for swap::trampoline. We set up the %ebp value to point to the
// parent call frame.
let frame = *sp;
sp.push(trampoline_2 as usize + 1); // Entry point
sp.push(frame.0 as usize); // Pointer to parent call frame
}
#[inline(always)]
pub unsafe fn swap(arg: usize, new_sp: StackPointer,
new_stack: Option<&Stack>) -> (usize, StackPointer) {
// Address of the topmost CFA stack slot.
let mut dummy: usize = mem::uninitialized();
let new_cfa = if let Some(new_stack) = new_stack {
(new_stack.base() as *mut usize).offset(-6)
} else {
// Just pass a dummy pointer if we aren't linking the stack
&mut dummy
};
#[naked]
unsafe extern "C" fn trampoline() {
asm!(
r#"
# Save frame pointer explicitly; the unwinder uses it to find CFA of
# the caller, and so it has to have the correct value immediately after
# the call instruction that invoked the trampoline.
pushl %ebp
.cfi_adjust_cfa_offset 4
.cfi_rel_offset %ebp, 0
# Link the call stacks together by writing the current stack bottom
# address to the CFA slot in the new stack.
movl %esp, (%ecx)
# Pass the stack pointer of the old context to the new one.
movl %esp, %esi
# Load stack pointer of the new context.
movl %edx, %esp
# Restore frame pointer of the new context.
popl %ebp
.cfi_adjust_cfa_offset -4
.cfi_restore %ebp
# Return into the new context. Use `pop` and `jmp` instead of a `ret`
# to avoid return address mispredictions (~8ns per `ret` on Ivy Bridge).
popl %eax
.cfi_adjust_cfa_offset -4
.cfi_register %eip, %eax
jmpl *%eax
"#
: : : : "volatile")
}
let ret: usize;
let ret_sp: *mut usize;
asm!(
r#"
# Push instruction pointer of the old context and switch to
# the new context.
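        # ${2:c} expands to input operand 2 (the trampoline symbol); the :c
        # modifier emits the bare symbol name, so this assembles as a direct
        # call rather than a call through an immediate.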
call ${2:c}
"#
: "={edi}" (ret)
"={esi}" (ret_sp)
: "s" (trampoline as usize)
"{edi}" (arg)
"{edx}" (new_sp.0)
"{ecx}" (new_cfa)
: "eax", "ebx", "ecx", "edx", /*"esi", "edi", "ebp", "esp",*/
"mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7",
"xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7",
"cc", "dirflag", "fpsr", "flags", "memory"
: "volatile");
(ret, StackPointer(ret_sp))
}<|fim▁end|> | .cfi_offset %ebp, -8
nop
nop |
<|file_name|>demo_active_information_storage.py<|end_file_name|><|fim▁begin|># Import classes
from idtxl.active_information_storage import ActiveInformationStorage
from idtxl.data import Data
# a) Generate test data
data = Data()<|fim▁hole|># b) Initialise analysis object and define settings
network_analysis = ActiveInformationStorage()
settings = {'cmi_estimator': 'JidtGaussianCMI',
'max_lag': 5}
# c) Run analysis
results = network_analysis.analyse_network(settings=settings, data=data)
# d) Plot list of processes with significant AIS to console
print(results.get_significant_processes(fdr=False))<|fim▁end|> | data.generate_mute_data(n_samples=1000, n_replications=5)
|
<|file_name|>HTTPDownstreamSessionTest.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#include <folly/Conv.h>
#include <folly/Foreach.h>
#include <folly/wangle/acceptor/ConnectionManager.h>
#include <folly/io/Cursor.h>
#include <folly/io/async/EventBase.h>
#include <folly/io/async/EventBaseManager.h>
#include <folly/io/async/TimeoutManager.h>
#include <gtest/gtest.h>
#include <proxygen/lib/http/codec/test/MockHTTPCodec.h>
#include <proxygen/lib/http/codec/test/TestUtils.h>
#include <proxygen/lib/http/session/HTTPDirectResponseHandler.h>
#include <proxygen/lib/http/session/HTTPDownstreamSession.h>
#include <proxygen/lib/http/session/HTTPSession.h>
#include <proxygen/lib/http/session/test/HTTPSessionMocks.h>
#include <proxygen/lib/http/session/test/HTTPSessionTest.h>
#include <proxygen/lib/http/session/test/MockByteEventTracker.h>
#include <proxygen/lib/http/session/test/TestUtils.h>
#include <proxygen/lib/test/TestAsyncTransport.h>
#include <string>
#include <strstream>
#include <folly/io/async/test/MockAsyncTransport.h>
#include <vector>
using namespace folly::wangle;
using namespace folly;
using namespace proxygen;
using namespace std;
using namespace testing;
struct HTTP1xCodecPair {
typedef HTTP1xCodec Codec;
static const int version = 1;
};
struct HTTP2CodecPair {
typedef HTTP2Codec Codec;
static const int version = 2;
};
struct SPDY2CodecPair {
typedef SPDYCodec Codec;
static const SPDYVersion version = SPDYVersion::SPDY2;
};
struct SPDY3CodecPair {
typedef SPDYCodec Codec;
static const SPDYVersion version = SPDYVersion::SPDY3;
};
struct SPDY3_1CodecPair {
typedef SPDYCodec Codec;
static const SPDYVersion version = SPDYVersion::SPDY3_1;
};
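// Each *CodecPair bundles a codec class with the version constant passed to
// makeServerCodec/makeClientCodec, so the HTTPDownstreamTest template below
// can be instantiated once per protocol.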
template <typename C>
class HTTPDownstreamTest : public testing::Test {
public:
explicit HTTPDownstreamTest(uint32_t sessionWindowSize = spdy::kInitialWindow)
: eventBase_(),
transport_(new TestAsyncTransport(&eventBase_)),
transactionTimeouts_(makeTimeoutSet(&eventBase_)) {
EXPECT_CALL(mockController_, attachSession(_));
httpSession_ = new HTTPDownstreamSession(
transactionTimeouts_.get(),
std::move(AsyncTransportWrapper::UniquePtr(transport_)),
localAddr, peerAddr,
&mockController_,
std::move(makeServerCodec<typename C::Codec>(
C::version)),
mockTransportInfo /* no stats for now */);
httpSession_->setFlowControl(spdy::kInitialWindow, spdy::kInitialWindow,
sessionWindowSize);
httpSession_->startNow();
}
void SetUp() {
folly::EventBaseManager::get()->clearEventBase();
HTTPSession::setPendingWriteMax(65536);
}
void addSingleByteReads(const char* data,
std::chrono::milliseconds delay={}) {
for (const char* p = data; *p != '\0'; ++p) {
transport_->addReadEvent(p, 1, delay);
}
}
void testPriorities(HTTPCodec& clientCodec, uint32_t numPriorities);
void testChunks(bool trailers);
void parseOutput(HTTPCodec& clientCodec) {
IOBufQueue stream(IOBufQueue::cacheChainLength());
auto writeEvents = transport_->getWriteEvents();
for (auto event: *writeEvents) {
auto vec = event->getIoVec();
for (size_t i = 0; i < event->getCount(); i++) {
unique_ptr<IOBuf> buf(
std::move(IOBuf::wrapBuffer(vec[i].iov_base, vec[i].iov_len)));
stream.append(std::move(buf));
uint32_t consumed = clientCodec.onIngress(*stream.front());
stream.split(consumed);
}
}
EXPECT_EQ(stream.chainLength(), 0);
}
protected:
EventBase eventBase_;
TestAsyncTransport* transport_; // invalid once httpSession_ is destroyed
AsyncTimeoutSet::UniquePtr transactionTimeouts_;
StrictMock<MockController> mockController_;
HTTPDownstreamSession* httpSession_;
};
// Uses TestAsyncTransport
typedef HTTPDownstreamTest<HTTP1xCodecPair> HTTPDownstreamSessionTest;
typedef HTTPDownstreamTest<SPDY2CodecPair> SPDY2DownstreamSessionTest;
typedef HTTPDownstreamTest<SPDY3CodecPair> SPDY3DownstreamSessionTest;
TEST_F(HTTPDownstreamSessionTest, immediate_eof) {
// Send EOF without any request data
EXPECT_CALL(mockController_, getRequestHandler(_, _)).Times(0);
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, http_1_0_no_headers) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
EXPECT_FALSE(msg->getIsChunked());
EXPECT_FALSE(msg->getIsUpgraded());
EXPECT_EQ("/", msg->getURL());
EXPECT_EQ("/", msg->getPath());
EXPECT_EQ("", msg->getQueryString());
EXPECT_EQ(1, msg->getHTTPVersion().first);
EXPECT_EQ(0, msg->getHTTPVersion().second);
}));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("GET / HTTP/1.0\r\n\r\n",
std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, http_1_0_no_headers_eof) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
EXPECT_FALSE(msg->getIsChunked());
EXPECT_FALSE(msg->getIsUpgraded());
EXPECT_EQ("http://example.com/foo?bar", msg->getURL());
EXPECT_EQ("/foo", msg->getPath());
EXPECT_EQ("bar", msg->getQueryString());
EXPECT_EQ(1, msg->getHTTPVersion().first);
EXPECT_EQ(0, msg->getHTTPVersion().second);
}));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("GET http://example.com/foo?bar HTTP/1.0\r\n\r\n",
std::chrono::milliseconds(0));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, single_bytes) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
const HTTPHeaders& hdrs = msg->getHeaders();
EXPECT_EQ(2, hdrs.size());
EXPECT_TRUE(hdrs.exists("host"));
EXPECT_TRUE(hdrs.exists("connection"));
EXPECT_FALSE(msg->getIsChunked());
EXPECT_FALSE(msg->getIsUpgraded());
EXPECT_EQ("/somepath.php?param=foo", msg->getURL());
EXPECT_EQ("/somepath.php", msg->getPath());
EXPECT_EQ("param=foo", msg->getQueryString());
EXPECT_EQ(1, msg->getHTTPVersion().first);
EXPECT_EQ(1, msg->getHTTPVersion().second);
}));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
addSingleByteReads("GET /somepath.php?param=foo HTTP/1.1\r\n"
"Host: example.com\r\n"
"Connection: close\r\n"
"\r\n");
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, single_bytes_with_body) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
const HTTPHeaders& hdrs = msg->getHeaders();
EXPECT_EQ(3, hdrs.size());
EXPECT_TRUE(hdrs.exists("host"));
EXPECT_TRUE(hdrs.exists("content-length"));
EXPECT_TRUE(hdrs.exists("myheader"));
EXPECT_FALSE(msg->getIsChunked());
EXPECT_FALSE(msg->getIsUpgraded());
EXPECT_EQ("/somepath.php?param=foo", msg->getURL());
EXPECT_EQ("/somepath.php", msg->getPath());
EXPECT_EQ("param=foo", msg->getQueryString());
EXPECT_EQ(1, msg->getHTTPVersion().first);
EXPECT_EQ(1, msg->getHTTPVersion().second);
}));
EXPECT_CALL(*handler, onBody(_))
.WillOnce(ExpectString("1"))
.WillOnce(ExpectString("2"))
.WillOnce(ExpectString("3"))
.WillOnce(ExpectString("4"))
.WillOnce(ExpectString("5"));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
addSingleByteReads("POST /somepath.php?param=foo HTTP/1.1\r\n"
"Host: example.com\r\n"
"MyHeader: FooBar\r\n"
"Content-Length: 5\r\n"
"\r\n"
"12345");
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, split_body) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
const HTTPHeaders& hdrs = msg->getHeaders();
EXPECT_EQ(2, hdrs.size());
}));
EXPECT_CALL(*handler, onBody(_))
.WillOnce(ExpectString("12345"))
.WillOnce(ExpectString("abcde"));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("POST / HTTP/1.1\r\n"
"Host: example.com\r\n"
"Content-Length: 10\r\n"
"\r\n"
"12345", std::chrono::milliseconds(0));
transport_->addReadEvent("abcde", std::chrono::milliseconds(5));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, post_chunked) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler->txn_));
EXPECT_CALL(*handler, onHeadersComplete(_))
.WillOnce(Invoke([&] (std::shared_ptr<HTTPMessage> msg) {
const HTTPHeaders& hdrs = msg->getHeaders();
EXPECT_EQ(3, hdrs.size());
EXPECT_TRUE(hdrs.exists("host"));
EXPECT_TRUE(hdrs.exists("content-type"));
EXPECT_TRUE(hdrs.exists("transfer-encoding"));
EXPECT_TRUE(msg->getIsChunked());
EXPECT_FALSE(msg->getIsUpgraded());
EXPECT_EQ("http://example.com/cgi-bin/foo.aspx?abc&def",
msg->getURL());
EXPECT_EQ("/cgi-bin/foo.aspx", msg->getPath());
EXPECT_EQ("abc&def", msg->getQueryString());
EXPECT_EQ(1, msg->getHTTPVersion().first);
EXPECT_EQ(1, msg->getHTTPVersion().second);
}));
EXPECT_CALL(*handler, onChunkHeader(3));
EXPECT_CALL(*handler, onBody(_))
.WillOnce(ExpectString("bar"));
EXPECT_CALL(*handler, onChunkComplete());
EXPECT_CALL(*handler, onChunkHeader(0x22));
EXPECT_CALL(*handler, onBody(_))
.WillOnce(ExpectString("0123456789abcdef\nfedcba9876543210\n"));
EXPECT_CALL(*handler, onChunkComplete());
EXPECT_CALL(*handler, onChunkHeader(3));
EXPECT_CALL(*handler, onBody(_))
.WillOnce(ExpectString("foo"));
EXPECT_CALL(*handler, onChunkComplete());
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs(handler, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("POST http://example.com/cgi-bin/foo.aspx?abc&def "
"HTTP/1.1\r\n"
"Host: example.com\r\n"
"Content-Type: text/pla", std::chrono::milliseconds(0));
transport_->addReadEvent("in; charset=utf-8\r\n"
"Transfer-encoding: chunked\r\n"
"\r", std::chrono::milliseconds(2));
transport_->addReadEvent("\n"
"3\r\n"
"bar\r\n"
"22\r\n"
"0123456789abcdef\n"
"fedcba9876543210\n"
"\r\n"
"3\r", std::chrono::milliseconds(3));
transport_->addReadEvent("\n"
"foo\r\n"
"0\r\n\r\n", std::chrono::milliseconds(1));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, multi_message) {
MockHTTPHandler* handler1 = new MockHTTPHandler();
MockHTTPHandler* handler2 = new MockHTTPHandler();
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler1))
.WillOnce(Return(handler2));
InSequence dummy;
EXPECT_CALL(*handler1, setTransaction(_))
.WillOnce(SaveArg<0>(&handler1->txn_));
EXPECT_CALL(*handler1, onHeadersComplete(_));
EXPECT_CALL(*handler1, onBody(_))
.WillOnce(ExpectString("foo"))
.WillOnce(ExpectString("bar9876"));
EXPECT_CALL(*handler1, onEOM())
.WillOnce(InvokeWithoutArgs(handler1, &MockHTTPHandler::sendReply));
EXPECT_CALL(*handler1, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler1; }));
EXPECT_CALL(*handler2, setTransaction(_))
.WillOnce(SaveArg<0>(&handler2->txn_));
EXPECT_CALL(*handler2, onHeadersComplete(_));
EXPECT_CALL(*handler2, onChunkHeader(0xa));
EXPECT_CALL(*handler2, onBody(_))
.WillOnce(ExpectString("some "))
.WillOnce(ExpectString("data\n"));
EXPECT_CALL(*handler2, onChunkComplete());
EXPECT_CALL(*handler2, onEOM())
.WillOnce(InvokeWithoutArgs(handler2, &MockHTTPHandler::terminate));
EXPECT_CALL(*handler2, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler2; }));
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("POST / HTTP/1.1\r\n"
"Host: example.com\r\n"
"Content-Length: 10\r\n"
"\r\n"
"foo", std::chrono::milliseconds(0));
transport_->addReadEvent("bar9876"
"POST /foo HTTP/1.1\r\n"
"Host: exa", std::chrono::milliseconds(2));
transport_->addReadEvent("mple.com\r\n"
"Connection: close\r\n"
"Trans", std::chrono::milliseconds(0));
transport_->addReadEvent("fer-encoding: chunked\r\n"
"\r\n", std::chrono::milliseconds(2));
transport_->addReadEvent("a\r\nsome ", std::chrono::milliseconds(0));
transport_->addReadEvent("data\n\r\n0\r\n\r\n", std::chrono::milliseconds(2));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, connect) {
StrictMock<MockHTTPHandler> handler;
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler.txn_));
// Send HTTP 200 OK to accept the CONNECT request
EXPECT_CALL(handler, onHeadersComplete(_))
.WillOnce(Invoke([&handler] (std::shared_ptr<HTTPMessage> msg) {
handler.sendHeaders(200, 100);
}));
EXPECT_CALL(handler, onUpgrade(_));
// Data should be received using onBody
EXPECT_CALL(handler, onBody(_))
.WillOnce(ExpectString("12345"))
.WillOnce(ExpectString("abcde"));
EXPECT_CALL(handler, onEOM())
.WillOnce(InvokeWithoutArgs(&handler, &MockHTTPHandler::terminate));
EXPECT_CALL(handler, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("CONNECT test HTTP/1.1\r\n"
"\r\n"
"12345", std::chrono::milliseconds(0));
transport_->addReadEvent("abcde", std::chrono::milliseconds(5));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, connect_rejected) {
StrictMock<MockHTTPHandler> handler;
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler.txn_));
// Send HTTP 400 to reject the CONNECT request
EXPECT_CALL(handler, onHeadersComplete(_))
.WillOnce(Invoke([&handler] (std::shared_ptr<HTTPMessage> msg) {
handler.sendReplyCode(400);
}));
EXPECT_CALL(handler, onEOM())
.WillOnce(InvokeWithoutArgs(&handler, &MockHTTPHandler::terminate));
EXPECT_CALL(handler, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("CONNECT test HTTP/1.1\r\n"
"\r\n"
"12345", std::chrono::milliseconds(0));
transport_->addReadEvent("abcde", std::chrono::milliseconds(5));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, http_upgrade) {
StrictMock<MockHTTPHandler> handler;
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler.txn_));
  // Send HTTP 101 Switching Protocols to accept the upgrade request
EXPECT_CALL(handler, onHeadersComplete(_))
.WillOnce(Invoke([&handler] (std::shared_ptr<HTTPMessage> msg) {
handler.sendHeaders(101, 100);
}));
// Send the response in the new protocol after upgrade
EXPECT_CALL(handler, onUpgrade(_))
.WillOnce(Invoke([&handler] (UpgradeProtocol protocol) {
handler.sendReplyCode(100);
}));
EXPECT_CALL(handler, onEOM())
.WillOnce(InvokeWithoutArgs(&handler, &MockHTTPHandler::terminate));
EXPECT_CALL(handler, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("GET /upgrade HTTP/1.1\r\n"
"Upgrade: TEST/1.0\r\n"
"Connection: upgrade\r\n"
"\r\n", std::chrono::milliseconds(0));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST(HTTPDownstreamTest, parse_error_no_txn) {
// 1) Get a parse error on SYN_STREAM for streamID == 1
// 2) Expect that the codec should be asked to generate an abort on
// streamID==1
EventBase evb;
  // Setup the controller and its expectations.
NiceMock<MockController> mockController;
// Setup the codec, its callbacks, and its expectations.
auto codec = makeDownstreamParallelCodec();
HTTPCodec::Callback* codecCallback = nullptr;
EXPECT_CALL(*codec, setCallback(_))
.WillRepeatedly(SaveArg<0>(&codecCallback));
// Expect egress abort for streamID == 1
EXPECT_CALL(*codec, generateRstStream(_, 1, _));
// Setup transport
bool transportGood = true;
auto transport = newMockTransport(&evb);
EXPECT_CALL(*transport, good())
.WillRepeatedly(ReturnPointee(&transportGood));
EXPECT_CALL(*transport, closeNow())
.WillRepeatedly(Assign(&transportGood, false));
EXPECT_CALL(*transport, writeChain(_, _, _))
.WillRepeatedly(Invoke([&] (folly::AsyncTransportWrapper::WriteCallback* callback,
const shared_ptr<IOBuf> iob,
WriteFlags flags) {
callback->writeSuccess();
}));
// Create the downstream session, thus initializing codecCallback
auto transactionTimeouts = makeInternalTimeoutSet(&evb);
auto session = new HTTPDownstreamSession(
transactionTimeouts.get(),
AsyncTransportWrapper::UniquePtr(transport),
localAddr, peerAddr,
&mockController, std::move(codec),
mockTransportInfo);
session->startNow();
HTTPException ex(HTTPException::Direction::INGRESS_AND_EGRESS, "foo");
ex.setProxygenError(kErrorParseHeader);
ex.setCodecStatusCode(ErrorCode::REFUSED_STREAM);
codecCallback->onError(HTTPCodec::StreamID(1), ex, true);
// cleanup
session->shutdownTransportWithReset(kErrorConnectionReset);
evb.loop();
}
TEST(HTTPDownstreamTest, byte_events_drained) {
// Test that byte events are drained before socket is closed
EventBase evb;
NiceMock<MockController> mockController;
auto codec = makeDownstreamParallelCodec();
auto byteEventTracker = new MockByteEventTracker(nullptr);
auto transport = newMockTransport(&evb);
auto transactionTimeouts = makeInternalTimeoutSet(&evb);
// Create the downstream session
auto session = new HTTPDownstreamSession(
transactionTimeouts.get(),
AsyncTransportWrapper::UniquePtr(transport),
localAddr, peerAddr,
&mockController, std::move(codec),
mockTransportInfo);
session->setByteEventTracker(
std::unique_ptr<ByteEventTracker>(byteEventTracker));
InSequence dummy;
session->startNow();
// Byte events should be drained first
EXPECT_CALL(*byteEventTracker, drainByteEvents())
.Times(1);
EXPECT_CALL(*transport, closeWithReset())
.Times(AtLeast(1));
// Close the socket
session->shutdownTransportWithReset(kErrorConnectionReset);
evb.loop();
}
TEST_F(HTTPDownstreamSessionTest, trailers) {
testChunks(true);
}
TEST_F(HTTPDownstreamSessionTest, explicit_chunks) {
testChunks(false);
}
template <class C>
void HTTPDownstreamTest<C>::testChunks(bool trailers) {
StrictMock<MockHTTPHandler> handler;
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler.txn_));
EXPECT_CALL(handler, onHeadersComplete(_));
EXPECT_CALL(handler, onEOM())
.WillOnce(InvokeWithoutArgs([&handler, trailers] () {
handler.sendChunkedReplyWithBody(200, 100, 17, trailers);
}));
EXPECT_CALL(handler, detachTransaction());
transport_->addReadEvent("GET / HTTP/1.1\r\n"
"\r\n", std::chrono::milliseconds(0));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
NiceMock<MockHTTPCodecCallback> callbacks;
EXPECT_CALL(callbacks, onMessageBegin(1, _))
.Times(1);
EXPECT_CALL(callbacks, onHeadersComplete(1, _))
.Times(1);
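  // sendChunkedReplyWithBody(200, 100, 17, ...) appears to emit a 100-byte
  // body in 17-byte chunks, i.e. ceil(100 / 17) = 6 chunks: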
for (int i = 0; i < 6; i++) {
EXPECT_CALL(callbacks, onChunkHeader(1, _));
EXPECT_CALL(callbacks, onBody(1, _));
EXPECT_CALL(callbacks, onChunkComplete(1));
}
if (trailers) {
EXPECT_CALL(callbacks, onTrailersComplete(1, _));
}
EXPECT_CALL(callbacks, onMessageComplete(1, _));
clientCodec.setCallback(&callbacks);
parseOutput(clientCodec);
EXPECT_CALL(mockController_, detachSession(_));
}
TEST_F(HTTPDownstreamSessionTest, http_drain) {
StrictMock<MockHTTPHandler> handler1;
StrictMock<MockHTTPHandler> handler2;
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(SaveArg<0>(&handler1.txn_));
EXPECT_CALL(handler1, onHeadersComplete(_))
.WillOnce(Invoke([this, &handler1] (std::shared_ptr<HTTPMessage> msg) {
handler1.sendHeaders(200, 100);
httpSession_->notifyPendingShutdown();
}));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1] {
handler1.sendBody(100);
handler1.txn_->sendEOM();
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler2));
EXPECT_CALL(handler2, setTransaction(_))
.WillOnce(SaveArg<0>(&handler2.txn_));
EXPECT_CALL(handler2, onHeadersComplete(_))
.WillOnce(Invoke([this, &handler2] (std::shared_ptr<HTTPMessage> msg) {
handler2.sendHeaders(200, 100);
}));
EXPECT_CALL(handler2, onEOM())
.WillOnce(InvokeWithoutArgs([&handler2] {
handler2.sendBody(100);
handler2.txn_->sendEOM();
}));
EXPECT_CALL(handler2, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("GET / HTTP/1.1\r\n"
"\r\n", std::chrono::milliseconds(0));
transport_->addReadEvent("GET / HTTP/1.1\r\n"
"\r\n", std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
// 1) receive full request
// 2) notify pending shutdown
// 3) wait for session read timeout -> should be ignored
// 4) response completed
TEST_F(HTTPDownstreamSessionTest, http_drain_long_running) {
StrictMock<MockHTTPHandler> handler;
InSequence enforceSequence;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
  // txn1: as soon as its headers are complete, call notifyPendingShutdown()
  // (which sets shouldShutdown=true on the session)
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(SaveArg<0>(&handler.txn_));
EXPECT_CALL(handler, onHeadersComplete(_))
.WillOnce(Invoke([this, &handler] (std::shared_ptr<HTTPMessage> msg) {
httpSession_->notifyPendingShutdown();
eventBase_.tryRunAfterDelay([this] {
// simulate read timeout
httpSession_->timeoutExpired();
}, 100);
eventBase_.tryRunAfterDelay([&handler] {
handler.sendReplyWithBody(200, 100);
}, 200);
}));
EXPECT_CALL(handler, onEOM());
EXPECT_CALL(handler, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent("GET / HTTP/1.1\r\n"
"\r\n", std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, early_abort) {
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence dummy;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(Invoke([&] (HTTPTransaction* txn) {
handler->txn_ = txn;
handler->txn_->sendAbort();
}));
EXPECT_CALL(*handler, onHeadersComplete(_))
.Times(0);
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([&] { delete handler; }));
EXPECT_CALL(mockController_, detachSession(_));
addSingleByteReads("GET /somepath.php?param=foo HTTP/1.1\r\n"
"Host: example.com\r\n"
"Connection: close\r\n"
"\r\n");
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
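// Pause writes while txn 1's large response is buffered, then deliver a
// RST_STREAM for it. The handler should observe a stream abort while its
// egress is still buffered; txn 2 completes once writes resume.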
TEST_F(SPDY3DownstreamSessionTest, http_paused_buffered) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
IOBufQueue rst{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
MockHTTPHandler handler1;
MockHTTPHandler handler2;
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
clientCodec.generateRstStream(rst, streamID, ErrorCode::CANCEL);
streamID += 2;
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1))
.WillOnce(Return(&handler2));
EXPECT_CALL(mockController_, detachSession(_));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
transport_->pauseWrites();
handler1.sendHeaders(200, 65536 * 2);
handler1.sendBody(65536 * 2);
}));
EXPECT_CALL(handler1, onEgressPaused());
EXPECT_CALL(handler2, setTransaction(_))
.WillOnce(Invoke([&handler2] (HTTPTransaction* txn) {
handler2.txn_ = txn; }));
EXPECT_CALL(handler2, onEgressPaused());
EXPECT_CALL(handler2, onHeadersComplete(_));
EXPECT_CALL(handler2, onEOM());
EXPECT_CALL(handler1, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorStreamAbort);
eventBase_.runInLoop([this] {
transport_->resumeWrites();
});
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(handler2, onEgressResumed())
.WillOnce(Invoke([&] () {
handler2.sendReplyWithBody(200, 32768);
}));
EXPECT_CALL(handler2, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->addReadEvent(rst, std::chrono::milliseconds(10));
transport_->addReadEOF(std::chrono::milliseconds(50));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, http_writes_draining_timeout) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
MockHTTPHandler handler1;
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
auto streamID = HTTPCodec::StreamID(0);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
clientCodec.generateHeader(requests, streamID, req);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
EXPECT_CALL(mockController_, detachSession(_));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
transport_->pauseWrites();
handler1.sendHeaders(200, 1000);
}));
EXPECT_CALL(handler1, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorWriteTimeout);
ASSERT_EQ(
folly::to<std::string>("WriteTimeout on transaction id: ",
handler1.txn_->getID()),
std::string(ex.what()));
handler1.txn_->sendAbort();
}));
EXPECT_CALL(handler1, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, http_rate_limit_normal) {
// The rate-limiting code grabs the event base from the EventBaseManager,
// so we need to set it.
folly::EventBaseManager::get()->setEventBase(&eventBase_, false);
// Create a request
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
MockHTTPHandler handler1;
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
auto streamID = HTTPCodec::StreamID(0);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
// The controller should return the handler when asked
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillRepeatedly(Return(&handler1));
// Set a low rate-limit on the transaction
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
uint32_t rateLimit_kbps = 640;
txn->setEgressRateLimit(rateLimit_kbps * 1024);
handler1.txn_ = txn;
}));
// Send a somewhat big response that we know will get rate-limited
InSequence handlerSequence;
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
// At 640kbps, this should take slightly over 800ms
uint32_t rspLengthBytes = 100000;
handler1.sendHeaders(200, rspLengthBytes);
handler1.sendBody(rspLengthBytes);
handler1.txn_->sendEOM();
}));
EXPECT_CALL(handler1, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
// Keep the session around even after the event base loop completes so we can
// read the counters on a valid object.
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
proxygen::TimePoint timeFirstWrite =
transport_->getWriteEvents()->front()->getTime();
proxygen::TimePoint timeLastWrite =
transport_->getWriteEvents()->back()->getTime();
int64_t writeDuration =
(int64_t)millisecondsBetween(timeLastWrite, timeFirstWrite).count();
EXPECT_GT(writeDuration, 800);
}
TEST_F(SPDY3DownstreamSessionTest, spdy_rate_limit_normal) {
// The rate-limiting code grabs the event base from the EventBaseManager,
// so we need to set it.
folly::EventBaseManager::get()->setEventBase(&eventBase_, false);
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
MockHTTPHandler handler1;
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
clientCodec.getEgressSettings()->setSetting(SettingsId::INITIAL_WINDOW_SIZE,
100000);
clientCodec.generateSettings(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillRepeatedly(Return(&handler1));
EXPECT_CALL(mockController_, detachSession(_));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
uint32_t rateLimit_kbps = 640;
txn->setEgressRateLimit(rateLimit_kbps * 1024);
handler1.txn_ = txn;
}));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
// At 640kbps, this should take slightly over 800ms
uint32_t rspLengthBytes = 100000;
handler1.sendHeaders(200, rspLengthBytes);
handler1.sendBody(rspLengthBytes);
handler1.txn_->sendEOM();
}));
EXPECT_CALL(handler1, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->addReadEOF(std::chrono::milliseconds(50));
transport_->startReadEvents();
// Keep the session around even after the event base loop completes so we can
// read the counters on a valid object.
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
proxygen::TimePoint timeFirstWrite =
transport_->getWriteEvents()->front()->getTime();
proxygen::TimePoint timeLastWrite =
transport_->getWriteEvents()->back()->getTime();
int64_t writeDuration =
(int64_t)millisecondsBetween(timeLastWrite, timeFirstWrite).count();
EXPECT_GT(writeDuration, 800);
}
/**
* This test will reset the connection while the server is waiting around
* to send more bytes (so as to keep under the rate limit).
*/
TEST_F(SPDY3DownstreamSessionTest, spdy_rate_limit_rst) {
// The rate-limiting code grabs the event base from the EventBaseManager,
// so we need to set it.
folly::EventBaseManager::get()->setEventBase(&eventBase_, false);
IOBufQueue requests{IOBufQueue::cacheChainLength()};
IOBufQueue rst{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
MockHTTPHandler handler1;
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
clientCodec.getEgressSettings()->setSetting(SettingsId::INITIAL_WINDOW_SIZE,
100000);
clientCodec.generateSettings(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
clientCodec.generateRstStream(rst, streamID, ErrorCode::CANCEL);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillRepeatedly(Return(&handler1));
EXPECT_CALL(mockController_, detachSession(_));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
uint32_t rateLimit_kbps = 640;
txn->setEgressRateLimit(rateLimit_kbps * 1024);
handler1.txn_ = txn;
}));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
uint32_t rspLengthBytes = 100000;
handler1.sendHeaders(200, rspLengthBytes);
handler1.sendBody(rspLengthBytes);
handler1.txn_->sendEOM();
}));
EXPECT_CALL(handler1, onError(_));
EXPECT_CALL(handler1, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->addReadEvent(rst, std::chrono::milliseconds(10));
transport_->addReadEOF(std::chrono::milliseconds(50));
transport_->startReadEvents();
eventBase_.loop();
}
// Send a 1.0 request, egress the EOM with the last body chunk on a paused
// socket, and let it timeout. shutdownTransportWithReset will result in a call
// to removeTransaction with writesDraining_=true
TEST_F(HTTPDownstreamSessionTest, write_timeout) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
MockHTTPHandler handler1;
HTTPMessage req = getGetRequest();
req.setHTTPVersion(1, 0);
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
auto streamID = HTTPCodec::StreamID(0);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
handler1.sendHeaders(200, 100);
eventBase_.tryRunAfterDelay([&handler1, this] {
transport_->pauseWrites();
handler1.sendBody(100);
handler1.txn_->sendEOM();
}, 50);
}));
EXPECT_CALL(handler1, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorWriteTimeout);
ASSERT_EQ(
folly::to<std::string>("WriteTimeout on transaction id: ",
handler1.txn_->getID()),
std::string(ex.what()));
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(requests, std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
// Send an abort from the write timeout path while pipelining
TEST_F(HTTPDownstreamSessionTest, write_timeout_pipeline) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
MockHTTPHandler handler1;
HTTPMessage req = getGetRequest();
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
const char* buf = "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n"
"GET / HTTP/1.1\r\nHost: localhost\r\n\r\n";
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
handler1.sendHeaders(200, 100);
eventBase_.tryRunAfterDelay([&handler1, this] {
transport_->pauseWrites();
handler1.sendBody(100);
handler1.txn_->sendEOM();
}, 50);
}));
EXPECT_CALL(handler1, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorWriteTimeout);
ASSERT_EQ(
folly::to<std::string>("WriteTimeout on transaction id: ",
handler1.txn_->getID()),
std::string(ex.what()));
handler1.txn_->sendAbort();
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(buf, std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, body_packetization) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
MockHTTPHandler handler1;
HTTPMessage req = getGetRequest();
req.setHTTPVersion(1, 0);
req.setWantsKeepalive(false);
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
auto streamID = HTTPCodec::StreamID(0);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
handler1.sendReplyWithBody(200, 32768);
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(requests, std::chrono::milliseconds(0));
transport_->startReadEvents();
// Keep the session around even after the event base loop completes so we can
// read the counters on a valid object.
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
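  // The headers plus the 32KB body fit under the pending-write limit, so
  // they should be coalesced into a single write to the transport.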
EXPECT_EQ(transport_->getWriteEvents()->size(), 1);
}
TEST_F(HTTPDownstreamSessionTest, http_malformed_pkt1) {
  // Create an HTTP connection and keep sending just '\n' to the HTTP1xCodec.
std::string data(90000, '\n');
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(data.c_str(), data.length(),
std::chrono::milliseconds(0));
transport_->addReadEOF(std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
}
TEST_F(HTTPDownstreamSessionTest, big_explcit_chunk_write) {
// even when the handler does a massive write, the transport only gets small
// writes
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
HTTP1xCodec clientCodec(TransportDirection::UPSTREAM);
auto streamID = HTTPCodec::StreamID(0);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
MockHTTPHandler handler;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(Invoke([&handler] (HTTPTransaction* txn) {
handler.txn_ = txn; }));
EXPECT_CALL(handler, onHeadersComplete(_))
.WillOnce(Invoke([&handler] (std::shared_ptr<HTTPMessage> msg) {
handler.sendHeaders(200, 100, false);
size_t len = 16 * 1024 * 1024;
handler.txn_->sendChunkHeader(len);
auto chunk = makeBuf(len);
handler.txn_->sendBody(std::move(chunk));
handler.txn_->sendChunkTerminator();
handler.txn_->sendEOM();
}));
EXPECT_CALL(handler, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(requests, std::chrono::milliseconds(0));
transport_->startReadEvents();
// Keep the session around even after the event base loop completes so we can
// read the counters on a valid object.
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
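  // With the 64KB pending-write limit from SetUp(), the 16MB chunk should be
  // broken into roughly 16MB / 64KB = 256 separate writes.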
EXPECT_GT(transport_->getWriteEvents()->size(), 250);
}
TEST_F(SPDY2DownstreamSessionTest, spdy_prio) {
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY2);
testPriorities(clientCodec, 4);
}
TEST_F(SPDY3DownstreamSessionTest, spdy_prio) {
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
testPriorities(clientCodec, 8);
}
template <class C>
void HTTPDownstreamTest<C>::testPriorities(
HTTPCodec& clientCodec, uint32_t numPriorities) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
uint32_t iterations = 10;
uint32_t maxPriority = numPriorities - 1;
HTTPMessage req = getGetRequest();
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
for (int pri = numPriorities - 1; pri >= 0; pri--) {
req.setPriority(pri * (8 / numPriorities));
for (uint32_t i = 0; i < iterations; i++) {
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
MockHTTPHandler* handler = new MockHTTPHandler();
InSequence handlerSequence;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler));
EXPECT_CALL(*handler, setTransaction(_))
.WillOnce(Invoke([handler] (HTTPTransaction* txn) {
handler->txn_ = txn; }));
EXPECT_CALL(*handler, onHeadersComplete(_));
EXPECT_CALL(*handler, onEOM())
.WillOnce(InvokeWithoutArgs([handler] {
handler->sendReplyWithBody(200, 1000);
}));
EXPECT_CALL(*handler, detachTransaction())
.WillOnce(InvokeWithoutArgs([handler] { delete handler; }));
streamID += 2;
}
}
unique_ptr<IOBuf> head = requests.move();
head->coalesce();
transport_->addReadEvent(head->data(), head->length(),
std::chrono::milliseconds(0));
transport_->startReadEvents();
eventBase_.loop();
NiceMock<MockHTTPCodecCallback> callbacks;
std::list<HTTPCodec::StreamID> streams;
EXPECT_CALL(callbacks, onMessageBegin(_, _))
.Times(iterations * numPriorities);
EXPECT_CALL(callbacks, onHeadersComplete(_, _))
.Times(iterations * numPriorities);
// body is variable and hence ignored
EXPECT_CALL(callbacks, onMessageComplete(_, _))
.Times(iterations * numPriorities)
.WillRepeatedly(Invoke([&] (HTTPCodec::StreamID stream, bool upgrade) {
streams.push_back(stream);
}));
clientCodec.setCallback(&callbacks);
parseOutput(clientCodec);
// transactions finish in priority order (higher streamIDs first)
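  // For example, with 8 priorities and 10 iterations each, the
  // highest-priority requests were sent last (stream IDs 141-159) and are
  // expected to land in the first band checked, (140, 160].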
EXPECT_EQ(streams.size(), iterations * numPriorities);
auto txn = streams.begin();
for (int band = maxPriority; band >= 0; band--) {
auto upperID = iterations * 2 * (band + 1);
auto lowerID = iterations * 2 * band;
for (uint32_t i = 0; i < iterations; i++) {
EXPECT_LE(lowerID, (uint32_t)*txn);
EXPECT_GE(upperID, (uint32_t)*txn);
++txn;
}
}
}
// Verifies that the read timeout is not running when no ingress is expected/
// required to proceed
TEST_F(SPDY3DownstreamSessionTest, spdy_timeout) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
clientCodec.generateConnectionPreface(requests);
for (auto streamID = HTTPCodec::StreamID(1); streamID <= 3; streamID += 2) {
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
}
MockHTTPHandler* handler1 = new StrictMock<MockHTTPHandler>();
MockHTTPHandler* handler2 = new StrictMock<MockHTTPHandler>();
HTTPSession::setPendingWriteMax(512);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(handler1))
.WillOnce(Return(handler2));
InSequence handlerSequence;
EXPECT_CALL(*handler1, setTransaction(_))
.WillOnce(Invoke([handler1] (HTTPTransaction* txn) {
handler1->txn_ = txn; }));
EXPECT_CALL(*handler1, onHeadersComplete(_))
.WillOnce(InvokeWithoutArgs([this] { transport_->pauseWrites(); }));
EXPECT_CALL(*handler1, onEOM())
.WillOnce(InvokeWithoutArgs([handler1] {
handler1->sendHeaders(200, 1000);
handler1->sendBody(1000);
}));
EXPECT_CALL(*handler1, onEgressPaused());
EXPECT_CALL(*handler2, setTransaction(_))
.WillOnce(Invoke([handler2] (HTTPTransaction* txn) {
handler2->txn_ = txn; }));
EXPECT_CALL(*handler2, onEgressPaused());
EXPECT_CALL(*handler2, onHeadersComplete(_));
EXPECT_CALL(*handler2, onEOM())
.WillOnce(InvokeWithoutArgs([handler2, this] {
          // This transaction should start egress paused. We've received the
          // EOM, so the read timeout shouldn't be running. Delay 400ms and
          // resume writes; this keeps txn1 from getting a write timeout.
eventBase_.tryRunAfterDelay([this] {
transport_->resumeWrites();
}, 400);
}));
EXPECT_CALL(*handler1, onEgressResumed())
.WillOnce(InvokeWithoutArgs([handler1] { handler1->txn_->sendEOM(); }));
EXPECT_CALL(*handler2, onEgressResumed())
.WillOnce(InvokeWithoutArgs([handler2, this] {
// delay an additional 200ms. The total 600ms delay shouldn't fire
// onTimeout
eventBase_.tryRunAfterDelay([handler2] {
handler2->sendReplyWithBody(200, 400); }, 200
);
}));
EXPECT_CALL(*handler1, detachTransaction())
.WillOnce(InvokeWithoutArgs([handler1] { delete handler1; }));
EXPECT_CALL(*handler2, detachTransaction())
.WillOnce(InvokeWithoutArgs([handler2] { delete handler2; }));
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
eventBase_.loop();
}
// Verifies that the read timer is running while a transaction is blocked
// on a window update
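// The client advertises a 500-byte flow-control window but the handler
// writes a 1000-byte body, so egress stalls waiting for a WINDOW_UPDATE
// that never arrives and the ingress timeout fires.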
TEST_F(SPDY3DownstreamSessionTest, spdy_timeout_win) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
clientCodec.getEgressSettings()->setSetting(SettingsId::INITIAL_WINDOW_SIZE,
500);<|fim▁hole|> clientCodec.generateSettings(requests);
clientCodec.generateHeader(requests, streamID, req, 0, false, nullptr);
clientCodec.generateEOM(requests, streamID);
StrictMock<MockHTTPHandler> handler;
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler));
InSequence handlerSequence;
EXPECT_CALL(handler, setTransaction(_))
.WillOnce(Invoke([&] (HTTPTransaction* txn) {
handler.txn_ = txn; }));
EXPECT_CALL(handler, onHeadersComplete(_));
EXPECT_CALL(handler, onEOM())
.WillOnce(InvokeWithoutArgs([&] {
handler.sendReplyWithBody(200, 1000);
}));
EXPECT_CALL(handler, onEgressPaused());
EXPECT_CALL(handler, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorTimeout);
ASSERT_EQ(
folly::to<std::string>("ingress timeout, streamID=", streamID),
std::string(ex.what()));
handler.terminate();
}));
EXPECT_CALL(handler, detachTransaction());
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
eventBase_.loop();
}
TYPED_TEST_CASE_P(HTTPDownstreamTest);
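// A frame generated by a mismatched (server-side) codec arrives after a
// valid request; the session tears down its transport and the open
// transaction sees kErrorEOF ("Shutdown transport: EOF").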
TYPED_TEST_P(HTTPDownstreamTest, testWritesDraining) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
auto clientCodec =
makeClientCodec<typename TypeParam::Codec>(TypeParam::version);
auto badCodec =
makeServerCodec<typename TypeParam::Codec>(TypeParam::version);
auto streamID = HTTPCodec::StreamID(1);
clientCodec->generateConnectionPreface(requests);
clientCodec->generateHeader(requests, streamID, req);
clientCodec->generateEOM(requests, streamID);
streamID += 1;
badCodec->generateHeader(requests, streamID, req, 1);
MockHTTPHandler handler1;
EXPECT_CALL(this->mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1));
EXPECT_CALL(this->mockController_, detachSession(_));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM());
EXPECT_CALL(handler1, onError(_))
.WillOnce(Invoke([&] (const HTTPException& ex) {
ASSERT_EQ(ex.getProxygenError(), kErrorEOF);
ASSERT_EQ("Shutdown transport: EOF", std::string(ex.what()));
}));
EXPECT_CALL(handler1, detachTransaction());
this->transport_->addReadEvent(requests, std::chrono::milliseconds(10));
this->transport_->startReadEvents();
this->eventBase_.loop();
}
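// Both 5000-byte responses are generated at once; the egress body should be
// split and interleaved across the two streams (1 and 3) rather than either
// stream's body being written out in a single piece.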
TYPED_TEST_P(HTTPDownstreamTest, testBodySizeLimit) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
auto clientCodec =
makeClientCodec<typename TypeParam::Codec>(TypeParam::version);
auto streamID = HTTPCodec::StreamID(1);
clientCodec->generateConnectionPreface(requests);
clientCodec->generateHeader(requests, streamID, req);
clientCodec->generateEOM(requests, streamID);
streamID += 2;
clientCodec->generateHeader(requests, streamID, req, 0);
clientCodec->generateEOM(requests, streamID);
MockHTTPHandler handler1;
MockHTTPHandler handler2;
EXPECT_CALL(this->mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1))
.WillOnce(Return(&handler2));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM());
EXPECT_CALL(handler2, setTransaction(_))
.WillOnce(Invoke([&handler2] (HTTPTransaction* txn) {
handler2.txn_ = txn; }));
EXPECT_CALL(handler2, onHeadersComplete(_));
EXPECT_CALL(handler2, onEOM())
.WillOnce(InvokeWithoutArgs([&] {
handler1.sendReplyWithBody(200, 5000);
handler2.sendReplyWithBody(200, 5000);
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(handler2, detachTransaction());
this->transport_->addReadEvent(requests, std::chrono::milliseconds(10));
this->transport_->startReadEvents();
this->eventBase_.loop();
NiceMock<MockHTTPCodecCallback> callbacks;
std::list<HTTPCodec::StreamID> streams;
EXPECT_CALL(callbacks, onMessageBegin(1, _));
EXPECT_CALL(callbacks, onHeadersComplete(1, _));
EXPECT_CALL(callbacks, onMessageBegin(3, _));
EXPECT_CALL(callbacks, onHeadersComplete(3, _));
EXPECT_CALL(callbacks, onBody(1, _));
EXPECT_CALL(callbacks, onBody(3, _));
EXPECT_CALL(callbacks, onBody(1, _));
EXPECT_CALL(callbacks, onMessageComplete(1, _));
EXPECT_CALL(callbacks, onBody(3, _));
EXPECT_CALL(callbacks, onMessageComplete(3, _));
clientCodec->setCallback(&callbacks);
this->parseOutput(*clientCodec);
}
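// Pausing the transport should pause every transaction uniformly, and
// resuming should resume them all, regardless of which transaction filled
// the write buffer.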
TYPED_TEST_P(HTTPDownstreamTest, testUniformPauseState) {
HTTPSession::setPendingWriteMax(12000);
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
req.setPriority(1);
auto clientCodec =
makeClientCodec<typename TypeParam::Codec>(TypeParam::version);
auto streamID = HTTPCodec::StreamID(1);
clientCodec->generateConnectionPreface(requests);
clientCodec->getEgressSettings()->setSetting(SettingsId::INITIAL_WINDOW_SIZE,
1000000);
clientCodec->generateSettings(requests);
clientCodec->generateWindowUpdate(requests, 0, 1000000);
clientCodec->generateHeader(requests, streamID, req);
clientCodec->generateEOM(requests, streamID);
streamID += 2;
clientCodec->generateHeader(requests, streamID, req, 0);
clientCodec->generateEOM(requests, streamID);
StrictMock<MockHTTPHandler> handler1;
StrictMock<MockHTTPHandler> handler2;
EXPECT_CALL(this->mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1))
.WillOnce(Return(&handler2));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM());
EXPECT_CALL(handler2, setTransaction(_))
.WillOnce(Invoke([&handler2] (HTTPTransaction* txn) {
handler2.txn_ = txn; }));
EXPECT_CALL(handler2, onHeadersComplete(_));
EXPECT_CALL(handler2, onEOM())
.WillOnce(InvokeWithoutArgs([&] {
handler1.sendHeaders(200, 24000);
// triggers pause of all txns
this->transport_->pauseWrites();
handler1.txn_->sendBody(std::move(makeBuf(12000)));
this->eventBase_.runAfterDelay([this] {
this->transport_->resumeWrites();
}, 50);
}));
EXPECT_CALL(handler1, onEgressPaused());
EXPECT_CALL(handler2, onEgressPaused());
EXPECT_CALL(handler1, onEgressResumed())
.WillOnce(InvokeWithoutArgs([&] {
        // resume does not trigger another pause
handler1.txn_->sendBody(std::move(makeBuf(12000)));
}));
EXPECT_CALL(handler2, onEgressResumed())
.WillOnce(InvokeWithoutArgs([&] {
handler2.sendHeaders(200, 12000);
handler2.txn_->sendBody(std::move(makeBuf(12000)));
this->transport_->pauseWrites();
this->eventBase_.runAfterDelay([this] {
this->transport_->resumeWrites();
}, 50);
}));
EXPECT_CALL(handler1, onEgressPaused());
EXPECT_CALL(handler2, onEgressPaused());
EXPECT_CALL(handler1, onEgressResumed());
EXPECT_CALL(handler2, onEgressResumed())
.WillOnce(InvokeWithoutArgs([&] {
handler1.txn_->sendEOM();
handler2.txn_->sendEOM();
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(handler2, detachTransaction());
this->transport_->addReadEvent(requests, std::chrono::milliseconds(10));
this->transport_->startReadEvents();
this->eventBase_.loop();
}
// Set max streams=1
// send two spdy requests a few ms apart.
// Block writes
// generate a complete response for txn=1 before parsing txn=3
// HTTPSession should allow the txn=3 to be served rather than refusing it
TEST_F(SPDY3DownstreamSessionTest, spdy_max_concurrent_streams) {
IOBufQueue requests{IOBufQueue::cacheChainLength()};
StrictMock<MockHTTPHandler> handler1;
StrictMock<MockHTTPHandler> handler2;
HTTPMessage req = getGetRequest();
req.setHTTPVersion(1, 0);
req.setWantsKeepalive(false);
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
streamID += 2;
clientCodec.generateHeader(requests, streamID, req);
clientCodec.generateEOM(requests, streamID);
httpSession_->getCodecFilterChain()->getEgressSettings()->setSetting(
SettingsId::MAX_CONCURRENT_STREAMS, 1);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handler1))
.WillOnce(Return(&handler2));
InSequence handlerSequence;
EXPECT_CALL(handler1, setTransaction(_))
.WillOnce(Invoke([&handler1] (HTTPTransaction* txn) {
handler1.txn_ = txn; }));
EXPECT_CALL(handler1, onHeadersComplete(_));
EXPECT_CALL(handler1, onEOM())
.WillOnce(InvokeWithoutArgs([&handler1, this] {
transport_->pauseWrites();
handler1.sendReplyWithBody(200, 100);
}));
EXPECT_CALL(handler2, setTransaction(_))
.WillOnce(Invoke([&handler2] (HTTPTransaction* txn) {
handler2.txn_ = txn; }));
EXPECT_CALL(handler2, onHeadersComplete(_));
EXPECT_CALL(handler2, onEOM())
.WillOnce(InvokeWithoutArgs([&handler2, this] {
handler2.sendReplyWithBody(200, 100);
eventBase_.runInLoop([this] {
transport_->resumeWrites();
});
}));
EXPECT_CALL(handler1, detachTransaction());
EXPECT_CALL(handler2, detachTransaction());
EXPECT_CALL(mockController_, detachSession(_));
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
transport_->addReadEOF(std::chrono::milliseconds(10));
eventBase_.loop();
}
REGISTER_TYPED_TEST_CASE_P(HTTPDownstreamTest,
testWritesDraining, testBodySizeLimit,
testUniformPauseState);
typedef ::testing::Types<SPDY2CodecPair, SPDY3CodecPair, SPDY3_1CodecPair,
HTTP2CodecPair> ParallelCodecs;
INSTANTIATE_TYPED_TEST_CASE_P(ParallelCodecs,
HTTPDownstreamTest,
ParallelCodecs);
class SPDY31DownstreamTest : public HTTPDownstreamTest<SPDY3_1CodecPair> {
public:
SPDY31DownstreamTest()
: HTTPDownstreamTest<SPDY3_1CodecPair>(2 * spdy::kInitialWindow) {}
};
TEST_F(SPDY31DownstreamTest, testSessionFlowControl) {
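  // The fixture doubles the session receive window (2 * spdy::kInitialWindow),
  // so the server should advertise the extra kInitialWindow bytes with a
  // WINDOW_UPDATE on stream 0 right after its SETTINGS frame.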
eventBase_.loopOnce();
NiceMock<MockHTTPCodecCallback> callbacks;
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3_1);
InSequence sequence;
EXPECT_CALL(callbacks, onSettings(_));
EXPECT_CALL(callbacks, onWindowUpdate(0, spdy::kInitialWindow));
clientCodec.setCallback(&callbacks);
parseOutput(clientCodec);
}
TEST_F(SPDY3DownstreamSessionTest, new_txn_egress_paused) {
// Send 1 request with prio=0
// Have egress pause while sending the first response
// Send a second request with prio=1
// -- the new txn should start egress paused
// Finish the body and eom both responses
// Unpause egress
// The first txn should complete first
std::array<StrictMock<MockHTTPHandler>, 2> handlers;
IOBufQueue requests{IOBufQueue::cacheChainLength()};
HTTPMessage req = getGetRequest();
SPDYCodec clientCodec(TransportDirection::UPSTREAM,
SPDYVersion::SPDY3);
auto streamID = HTTPCodec::StreamID(1);
clientCodec.generateConnectionPreface(requests);
req.setPriority(0);
clientCodec.generateHeader(requests, streamID, req, 0, nullptr);
clientCodec.generateEOM(requests, streamID);
streamID += 2;
req.setPriority(1);
clientCodec.generateHeader(requests, streamID, req, 0, nullptr);
clientCodec.generateEOM(requests, streamID);
EXPECT_CALL(mockController_, getRequestHandler(_, _))
.WillOnce(Return(&handlers[0]))
.WillOnce(Return(&handlers[1]));
HTTPSession::setPendingWriteMax(200); // lower the per session buffer limit
{
InSequence handlerSequence;
EXPECT_CALL(handlers[0], setTransaction(_))
.WillOnce(Invoke([&handlers] (HTTPTransaction* txn) {
handlers[0].txn_ = txn; }));
EXPECT_CALL(handlers[0], onHeadersComplete(_));
EXPECT_CALL(handlers[0], onEOM())
.WillOnce(Invoke([this, &handlers] {
this->transport_->pauseWrites();
handlers[0].sendHeaders(200, 1000);
handlers[0].sendBody(100); // headers + 100 bytes - over the limit
}));
EXPECT_CALL(handlers[0], onEgressPaused())
.WillOnce(InvokeWithoutArgs([] {
LOG(INFO) << "paused 1";
}));
EXPECT_CALL(handlers[1], setTransaction(_))
.WillOnce(Invoke([&handlers] (HTTPTransaction* txn) {
handlers[1].txn_ = txn; }));
EXPECT_CALL(handlers[1], onEgressPaused()); // starts paused
EXPECT_CALL(handlers[1], onHeadersComplete(_));
EXPECT_CALL(handlers[1], onEOM())
.WillOnce(InvokeWithoutArgs([&handlers, this] {
// Technically shouldn't send while handler is egress
// paused, but meh.
handlers[0].sendBody(900);
handlers[0].txn_->sendEOM();
handlers[1].sendReplyWithBody(200, 1000);
eventBase_.runInLoop([this] {
transport_->resumeWrites();
});
}));
EXPECT_CALL(handlers[0], detachTransaction());
EXPECT_CALL(handlers[1], detachTransaction());
}
transport_->addReadEvent(requests, std::chrono::milliseconds(10));
transport_->startReadEvents();
transport_->addReadEOF(std::chrono::milliseconds(10));
HTTPSession::DestructorGuard g(httpSession_);
eventBase_.loop();
NiceMock<MockHTTPCodecCallback> callbacks;
std::list<HTTPCodec::StreamID> streams;
EXPECT_CALL(callbacks, onMessageBegin(_, _))
.Times(2);
EXPECT_CALL(callbacks, onHeadersComplete(_, _))
.Times(2);
// body is variable and hence ignored.
EXPECT_CALL(callbacks, onMessageComplete(_, _))
.WillRepeatedly(Invoke([&] (HTTPCodec::StreamID stream, bool upgrade) {
streams.push_back(stream);
}));
clientCodec.setCallback(&callbacks);
parseOutput(clientCodec);
EXPECT_CALL(mockController_, detachSession(_));
}<|fim▁end|> | |
<|file_name|>sarray_reader_buffer.hpp<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2016 Turi
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
#ifndef GRAPHLAB_SARRAY_READER_BUFFER
#define GRAPHLAB_SARRAY_READER_BUFFER
#include <algorithm> // std::min
#include <memory>
#include <vector>
#include <logger/assertions.hpp> // DASSERT_LT
#include <sframe/sframe_constants.hpp>
namespace graphlab {
template <typename T>
class sarray;
/**
* A buffered reader reading from a range of an sarray<T>.
*
* \code
* sarray<flexible_type> mysarray = ...;
*
* // Reader for the first thousand lines
* sarray_reader_buffer<flexible_type> reader(mysarray, 0, 1000);
*
* while(reader.has_next()) {
* flexible_type val = reader.next();
 * ... do something with val ...
* }
*
* // Reader for the entire sarray
* reader = sarray_reader_buffer<flexible_type>(mysarray, 0, (size_t)(-1));
* ...
* \endcode
*
 * Internally, the reader maintains a vector as a buffer; when reading
 * reaches the end of the buffer, it is refilled by reading from the sarray.
*/
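// Illustrative sketch (not part of the original header): the buffer can be
// rewound with clear() and reused for a second pass. "reader" below stands
// for a std::shared_ptr<sarray<flexible_type>::reader_type>, and consume()
// is a hypothetical caller-supplied function.
//
//   sarray_reader_buffer<flexible_type> buf(reader, 0, 1000, 256);
//   while (buf.has_next()) consume(buf.next());
//   buf.clear();                                  // rewind to row 0
//   while (buf.has_next()) consume(buf.next());   // second pass over the range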
template<typename T>
class sarray_reader_buffer {
public:
typedef T value_type;
sarray_reader_buffer() = default;
/// Construct from sarray reader with begin and end row.
sarray_reader_buffer(
std::shared_ptr<typename sarray<T>::reader_type> reader,
size_t row_start, size_t row_end,
size_t buffer_size = DEFAULT_SARRAY_READER_BUFFER_SIZE) {
init(reader, row_start, row_end, buffer_size);
}
void init(std::shared_ptr<typename sarray<T>::reader_type>& reader,
size_t row_start, size_t row_end,
size_t internal_buffer_size = DEFAULT_SARRAY_READER_BUFFER_SIZE) {
m_reader = reader;
m_buffer_pos = 0;
m_iter = row_start;
m_original_row_start = row_start;
m_row_start = row_start;
m_row_end = std::min(row_end, m_reader->size());
m_buffer_size = internal_buffer_size;
m_buffer.clear();
}
/// Return the next element in the reader.
value_type&& next();
  /// Return true if the reader has more elements.
bool has_next();
/// Return the buffer.
inline std::vector<value_type>& get_buffer() {return m_buffer;}
  /// Return the number of elements between row_start and row_end.
inline size_t size() {return m_row_end - m_original_row_start;}
/** Resets the buffer to the initial starting conditions. Reading
* from the buffer again will start from row_start.
*/
void clear();
private:
  /// Refill the chunk buffer from the sarray reader.
void refill();
typedef typename sarray<T>::reader_type reader_type;
/// Buffer the prefetched elements.
std::vector<value_type> m_buffer;
/// The underlying reader as a data source.
std::shared_ptr<reader_type> m_reader;
/// Current position of the buffer reader.
size_t m_buffer_pos = 0;<|fim▁hole|> size_t m_row_start = 0;
/// End row of the chunk.
size_t m_row_end = 0;
/// The size of the buffer vector
size_t m_buffer_size = 0;
/// The current iterator location
size_t m_iter = 0;
};
/// Return the next element in the chunk.
template<typename T>
T&& sarray_reader_buffer<T>::next() {
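  // Note: the returned T&& refers into m_buffer; it is only valid until the
  // next call that triggers refill(), so callers should move from it promptly.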
if (m_buffer_pos == m_buffer.size()) {
refill();
m_buffer_pos = 0;
}
DASSERT_LT(m_buffer_pos, m_buffer.size());
++m_iter;
return std::move(m_buffer[m_buffer_pos++]);
}
/// Return true if the chunk has remaining elements.
template<typename T>
bool sarray_reader_buffer<T>::has_next() {
return m_iter < m_row_end;
}
/// Refill the chunk buffer from the sarray reader.
template<typename T>
void sarray_reader_buffer<T>::refill() {
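  // Read the next block: at most m_buffer_size rows, clamped to the rows
  // remaining in [m_row_start, m_row_end).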
size_t size_of_refill = std::min<size_t>(m_row_end - m_row_start, m_buffer_size);
m_reader->read_rows(m_row_start, m_row_start + size_of_refill, m_buffer);
m_row_start += size_of_refill;
}
template<typename T>
void sarray_reader_buffer<T>::clear() {
m_buffer.clear();
m_row_start = m_original_row_start;
m_iter = m_original_row_start;
m_buffer_pos = 0;
}
}
#endif<|fim▁end|> | /// The initial starting point. clear() will reset row_start to here.
size_t m_original_row_start = 0;
/// Start row of the remaining chunk. |
<|file_name|>conference.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = '[email protected] (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.ext import ndb
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import TeeShirtSize
from settings import WEB_CLIENT_ID
from utils import getUserId
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api( name='conference',
version='v1',
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one if non-existent."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# TODO 1
# step 1. copy utils.py from additions folder to this folder<|fim▁hole|> # step 2. get user id by calling getUserId(user)
# step 3. create a new key of kind Profile from the id
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
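        # The user id doubles as the Profile's key name, so the lookup below is
        # a strongly consistent get-by-key rather than a query.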
# TODO 3
# get the entity from datastore by using get() on the key
profile = p_key.get()
if not profile:
profile = Profile(
key = p_key, # TODO 1 step 4. replace with the key from step 3
displayName = user.nickname(),
mainEmail= user.email(),
teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
)
# TODO 2
# save the profile to datastore
profile.put()
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
        # if saveProfile(), process user-modifiable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
# TODO 4
# put the modified profile to datastore
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# registers API
api = endpoints.api_server([ConferenceApi])<|fim▁end|> | # and import getUserId from it |
<|file_name|>google.js<|end_file_name|><|fim▁begin|>import { postToServer } from "../app/utils";
import { wait } from "../app/utils";
/**
* Google Authentication and linking module
* @returns {{link: function:object, login: function}}
*/
const googleProvider = new firebase.auth.GoogleAuthProvider();
function callIfFunction(func, arg1, arg2) {
if ($.isFunction(func))
func(arg1, arg2);
}
export function login() {
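    // Opens the Google sign-in popup and resolves with a Firebase ID token;
    // getIdToken(false) reuses a cached, unexpired token when available.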
return firebase.auth().signInWithPopup(googleProvider)
.then(function (result) {
const userLoggedIn = result && result.user;
if (userLoggedIn)
return result.user.getIdToken(false);
else
throw Error('Could not authenticate');
});
}
/**
* Google Linking Module
 * @returns {{linkGoogle: function(callback), unlinkGoogle: function(callback), isGoogleLinked: function(callback)}}
*/
export function link() {
function getProviderData(providerDataArray, providerId) {
if (!providerDataArray)
return null;
for (var i = 0; i < providerDataArray.length; i++)
if (providerDataArray[i].providerId === providerId)
return providerDataArray[i];
return null;
}
function isProvider(providerData, providerId) {
return getProviderData(providerData, providerId) !== null;
}
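    // After linking, backfill the Firebase profile with the provider's
    // display name and photo, report the link to the backend with a fresh
    // ID token, and reload the profile page once the change has propagated.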
function updateUser(user, providerData) {
if (!providerData)
return;
const updateUser = {};
if (!user.displayName)
updateUser.displayName = providerData.displayName;
if (!user.photoURL)
updateUser.photoURL = providerData.photoURL;
user.updateProfile(updateUser)
.then(function () {
return firebase.auth().currentUser.getIdToken(false);
}).then(function (token) {
return postToServer('/profile/api/link', token);
}).then(function (response) {
if (response !== 'OK')
return;
wait(5000).then(function () {
window.location = '/profile?refresh';
});
});
}
function accountLinked(linkCallback, user) {
callIfFunction(linkCallback, true);
updateUser(user, getProviderData(user.providerData, googleProvider.providerId));
}
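    // Merge flow for "auth/credential-already-in-use": sign in as the
    // duplicate account, delete it, link the credential to the original
    // (previous) user, then sign in again with the now-unique credential.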
function deletePreviousUser(prevUser, credential) {
const auth = firebase.auth();
return auth.signInWithCredential(credential)
.then(function(user) {
return user.delete();
}).then(function() {
return prevUser.linkWithCredential(credential);
}).then(function() {
return auth.signInWithCredential(credential);
});
}
return {
linkGoogle: function (linkCallback) {
firebase.auth().currentUser.linkWithPopup(googleProvider)
.then(function(result) {
accountLinked(linkCallback, result.user);
}).catch(function(error) {
if (error.code === 'auth/credential-already-in-use') {
const prevUser = firebase.auth().currentUser;
const credential = error.credential;
deletePreviousUser(prevUser, credential)
.then(function (user) {
accountLinked(linkCallback, user);
}).catch(function(error) {
callIfFunction(linkCallback, false, error);
});
} else {
callIfFunction(linkCallback, false, error);
}
});
},
unlinkGoogle: function (unlinkCallback) {
firebase.auth().currentUser.unlink(googleProvider.providerId)
.then(function () {
callIfFunction(unlinkCallback, true);
}).catch(function (error) {
callIfFunction(unlinkCallback, false, error);
});
},
isGoogleLinked: function (linked) {
firebase.auth().onAuthStateChanged(function (user) {
if (!$.isFunction(linked))
return;
if (user) {
linked(isProvider(user.providerData, googleProvider.providerId));
} else {
linked(); // Trigger hiding the button
}
});
}
};
}
export default {
link,<|fim▁hole|>}<|fim▁end|> | login |
<|file_name|>tools.py<|end_file_name|><|fim▁begin|>'''
Created on 15.11.2021
@author: michael
'''
from alexandriabase.daos import DocumentDao, DaoModule, DOCUMENT_TABLE,\
DocumentFileInfoDao
from injector import Injector, inject
from alexandriabase import AlexBaseModule
from alexandriabase.services import ServiceModule, DocumentFileManager,\
DocumentFileNotFound, THUMBNAIL, FileProvider, ReferenceService
from sqlalchemy.sql.expression import or_, and_
from alexandriabase.base_exceptions import NoSuchEntityException
from datetime import date
from os.path import exists
import re
def tex_sanitizing(text: str) -> str:
    text = text.replace("&", "\\&")
    text = text.replace("#", "\\#")
    # '%' starts a LaTeX comment and '_' a subscript, so escape them as well
    text = text.replace("%", "\\%")
    text = text.replace("_", "\\_")
    return text
class PlakatExporter:
@inject
def __init__(self, dao: DocumentDao,
file_info_dao: DocumentFileInfoDao,
file_manager: DocumentFileManager,
file_provider: FileProvider,
reference_service: ReferenceService):
self.dao = dao
self.file_info_dao = file_info_dao
self.file_manager = file_manager
self.file_provider = file_provider
self.reference_service = reference_service
self.titel = "Plakate im ASB"
def export_to_tex(self):
self.open_file()
for record in self.fetch_records():<|fim▁hole|> def print_record(self, record, events):
if self.filtered(record, events):
return
self.file.write("\n\n\\section*{Dokumentnr. %d}" % record.id)
self.file.write("\n\nBeschreibung: %s" % tex_sanitizing(record.description))
if record.condition is not None and record.condition.strip() != "":
self.file.write("\n\nZusätzliche Infos: %s" % tex_sanitizing(record.condition))
self.print_events(events)
self.print_img(record.id)
def fetch_records(self):
condition = DOCUMENT_TABLE.c.doktyp == 9
return self.dao.find(condition)
def filtered(self, record, events):
return False
def print_events(self, events):
if len(events) == 0:
return
if len(events) == 1:
self.file.write("\n\n\\subsection*{Verknüpftes Ereignis}")
else:
self.file.write("\n\n\\subsection*{Verknüpfte Ereignisse}")
for event in events:
self.file.write("\n\n%s: %s" % (event.daterange, tex_sanitizing(event.description)))
def print_img(self, id):
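        # Embed the document's thumbnail, generating it on demand if missing.
        # Lookup/IO failures are reported inline in the output instead of
        # aborting the whole export.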
try:
file_info = self.file_info_dao.get_by_id(id)
file_name = self.file_manager.get_generated_file_path(file_info, THUMBNAIL)
if not exists(file_name):
print("Generating file %s" % file_name)
self.file_provider.get_thumbnail(file_info)
self.file.write("\n\n\\vspace{0.5cm}")
self.file.write("\n\n\\includegraphics[width=7.0cm]{%s}\n" % file_name)
except NoSuchEntityException:
self.file.write("\n\nEintrag nicht gefunden!")
except DocumentFileNotFound:
self.file.write("\n\nDokumentdatei nicht gefunden!")
except OSError as e:
print(e)
print("Error on document %d" % id)
def open_file(self):
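        # Write the LaTeX preamble: landscape A4 (29.7cm x 21cm), two-column
        # article; the thumbnails are embedded via graphicx.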
self.file = open("/tmp/plakate.tex", "w")
self.file.write("\\documentclass[german, a4paper, 12pt, twocolums]{article}\n")
self.file.write("\\usepackage[utf8]{inputenc}\n")
self.file.write("\\usepackage[T1]{fontenc}\n")
self.file.write("\\usepackage{graphicx}\n")
self.file.write("\\setlength{\\parindent}{0cm}\n")
self.file.write("\\special{papersize=29.7cm,21cm}\n")
self.file.write("\\usepackage{geometry}\n")
self.file.write("\\geometry{verbose,body={29.7cm,21cm},tmargin=1.5cm,bmargin=1.5cm,lmargin=1cm,rmargin=1cm}\n")
self.file.write("\\begin{document}\n")
self.file.write("\\sloppy\n")
self.file.write("\\title{%s}\n" % self.titel)
self.file.write("\\author{Archiv Soziale Bewegungen e.V.}\n")
self.file.write("\\date{Stand: %s}\n" % date.today())
self.file.write("\\maketitle\n\n")
self.file.write("\\twocolumn\n\n")
def close_file(self):
self.file.write("\\end{document}\n")
self.file.close()
class FemPlakatExporter(PlakatExporter):
def open_file(self):
self.titel = "Plakate zur Neuen Frauenbewegung\\linebreak{}(vor 1990 oder Entstehung nicht bestimmt)"
PlakatExporter.open_file(self)
#def filtered(self, record, events):
# if record.condition is not None and re.compile(r".*(199\d|20\d\d).*").match(record.condition):
# return True
# if len(events) == 0:
# return False
# for event in events:
# if event.id < 1990000000:
# return False
# return True
def fetch_records(self):
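        # Only posters (doktyp 9) whose shelf mark ("standort") starts with
        # 7 or 23 - presumably the women's movement holdings.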
condition = and_(DOCUMENT_TABLE.c.doktyp == 9,
or_(DOCUMENT_TABLE.c.standort.like("7%"),
DOCUMENT_TABLE.c.standort.like("23%")))
return self.dao.find(condition)
if __name__ == '__main__':
injector = Injector([AlexBaseModule, DaoModule, ServiceModule])
exporter = injector.get(FemPlakatExporter)
exporter.export_to_tex()<|fim▁end|> | events = self.reference_service.get_events_referenced_by_document(record)
self.print_record(record, events)
self.close_file()
|
<|file_name|>WelcomePanel.java<|end_file_name|><|fim▁begin|>package view;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.Observable;
import java.util.Observer;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import model.BouncingProjectile;
import model.FriendlyShip;
import model.Projectile;
import controler.GameControler;
@SuppressWarnings("serial")
public class WelcomePanel extends JPanel implements Observer {
//Panel that shows the title and instructions; sends you to the game panel after you press enter.
private MainFrame mainFrame;
private GameControler controler;
private boolean loadingCompleted, reloading;<|fim▁hole|> public WelcomePanel(MainFrame newMainFrame, GameControler newControler){
mainFrame = newMainFrame;
controler = newControler;
loadingCompleted = false;
reloading = false;
setPreferredSize(new Dimension(500,630));
setLayout(new FlowLayout());
setBackground(Color.black);
JLabel titleLabel = new JLabel("WELCOME TO SPACE WARS!", JLabel.CENTER);
titleLabel.setPreferredSize(new Dimension(480,60));
titleLabel.setFont(new Font("Ariel", Font.BOLD, 33));
titleLabel.setForeground(Color.red);
JLabel instructionsLabel = new JLabel("<html>Fight with your battleships against the battleships of the enemy! "
+ "Move your ship by selecting it and move it to a location by click with the left mouse button. If you get close enaugh to an enemy, "
+ "then it will automaticly fire a projectile towards him. If you press with your right mouse button, "
+ "then a bouncing projectile will be fired. It is twice as big, bounces 3 times against the walls "
+ "and will stay longer in the battlefield then a regular projectile. If a ship is above the moon and fires a projectile, "
+ "then 8 projectiles will be shot as a bonus. You can change the speed of the game by moving the slider, "
+ "and enable or disable the sounds with the checkbox. You win the level if you defeat the ships of the enemy "
+ "before they defeat yours. If you get defeated, then you can reset the level. If you make it through all the levels, "
+ "then you have completed the game.</html>", JLabel.CENTER);
instructionsLabel.setPreferredSize(new Dimension(480,480));
instructionsLabel.setFont(new Font("Arial", Font.BOLD, 20));
instructionsLabel.setForeground(Color.gray);
loadingLabel = new JLabel("Loading the levels...", JLabel.CENTER);
loadingLabel.setPreferredSize(new Dimension(480,50));
loadingLabel.setFont(new Font("Arial", Font.BOLD, 30));
loadingLabel.setForeground(Color.green);
add(titleLabel);
add(instructionsLabel);
add(loadingLabel);
//load friendlyShipImage
friendlyShipImage = new ImageIcon("resources/chaser.png").getImage();
//start the game when the player presses enter and the loading is completed.
addKeyListener(new KeyAdapter(){
public void keyPressed(KeyEvent evt){
if(evt.getKeyCode() == KeyEvent.VK_ENTER && loadingCompleted){
mainFrame.startGame();
}
}
});
setFocusable(true);
}
/*Called from mainFrame after the welcomePanel is set to visible.*/
public void startLoading(){
controler.loadLevelData();
}
/*To reload the levels, some changes are needed. The reloading flag is used so the gamePanel
* doesn't get re-initialized again, and a different text is nicer.*/
public void prepaireReloadingLevels(){
loadingCompleted = false;
reloading = true;
loadingLabel.setText("Reloading the levels...");
}
/*Paint the background and the ship and bouncing projectiles.*/
public void paintComponent(Graphics g){
super.paintComponent(g);
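		//The animation and loading happen on other threads, so the model reads
		//below may fail transiently; the empty catch simply skips this frame.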
try{
Projectile moon = controler.getBackgroundMoonFromBattleField();
if(moon != null){
g.setColor(moon.getProjectileColor());
g.fillOval(moon.x, moon.y, moon.width, moon.height);
}
for(Projectile star:controler.getBackgroundStarsFromBattleField()){
g.setColor(star.getProjectileColor());
g.fillRect(star.x, star.y, star.width, star.height);
}
for(BouncingProjectile shot:controler.getWelcomeAnimationBouncingProjectiles()){
g.setColor(shot.getProjectileColor());
g.fillOval(shot.x, shot.y, shot.width, shot.height);
}
}catch(Exception e){}
FriendlyShip friendlyShip = controler.getWelcomeAnimationFriendlyShip();
g.drawImage(friendlyShipImage, friendlyShip.getLocation().x, friendlyShip.getLocation().y, null);
}
/*Gets called in the welcome animation loop and background loop to repaint after a change happened.
* Also used by the controler if the battlefield has loaded the levels, so the player can press enter and begin.*/
@Override
public void update(Observable arg0, Object arg1) {
String command = (String) arg1;
if(command!=null && command.equals("levelsLoaded")){
//this panel will be reloading if the user visits the panel for the 2nd time.
if(!reloading){
mainFrame.setGamePanel(controler.getAndIntializeGamePanel());
}
loadingCompleted = true;
reloading = false;
loadingLabel.setText("Press ENTER to start the game.");
}
repaint();
}
}<|fim▁end|> | private JLabel loadingLabel;
private Image friendlyShipImage; |