// IntWithMaxTest.java
package xigua.battle.of.elements.model;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class IntWithMaxTest {
private IntWithMax intWithMax;
@Before
public void setUp() {
intWithMax = new IntWithMax(42);
}
@Test
public void getValue_WithCorrectValue() {
assertThat(intWithMax.getValue()).isEqualTo(42);
}
@Test
public void setWhenValueBiggerThanMax_CorrectValueSet() {
intWithMax.setValue(100);
assertThat(intWithMax.getValue()).isEqualTo(42);
}
@Test
public void setWhenValueSmallerThanMax_CorrectValueSet() {
intWithMax.setValue(1);
assertThat(intWithMax.getValue()).isEqualTo(1);
}
@Test
public void getMax_WithCorrectMax() {
assertThat(intWithMax.getMaxValue()).isEqualTo(42);
}
}
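// A minimal sketch of the class under test, inferred from the assertions
// above (the real xigua.battle.of.elements.model.IntWithMax may differ):
// the constructor argument serves as both the maximum and the initial value,
// and setValue silently ignores values above the maximum.
class IntWithMaxSketch {
    private final int maxValue;
    private int value;

    IntWithMaxSketch(int maxValue) {
        this.maxValue = maxValue;
        this.value = maxValue;
    }

    public int getValue() { return value; }

    public int getMaxValue() { return maxValue; }

    public void setValue(int newValue) {
        if (newValue <= maxValue) {
            value = newValue;
        }
    }
}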
# download_data_test.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for download_data."""
from unittest import mock
from google.protobuf import text_format
import tensorflow as tf
from tensorboard.backend.event_processing import plugin_event_multiplexer
from tensorboard.plugins import base_plugin
from tensorboard.plugins.hparams import api_pb2
from tensorboard.plugins.hparams import backend_context
from tensorboard.plugins.hparams import download_data
EXPERIMENT = """
description: 'Test experiment'
user: 'Test user'
hparam_infos: [
{
name: 'initial_temp'
type: DATA_TYPE_FLOAT64
},
{
name: 'final_temp'
type: DATA_TYPE_FLOAT64
},
{ name: 'string_hparam' },
{ name: 'bool_hparam' },
{ name: 'optional_string_hparam' }
]
metric_infos: [
{ name: { tag: 'current_temp' } },
{ name: { tag: 'delta_temp' } },
{ name: { tag: 'optional_metric' } }
]
"""
SESSION_GROUPS = """
session_groups {
name: "group_1"
hparams { key: "bool_hparam" value { bool_value: true } }
hparams { key: "final_temp" value { number_value: 150.0 } }
hparams { key: "initial_temp" value { number_value: 270.0 } }
hparams { key: "string_hparam" value { string_value: "a string" } }
metric_values {
name { tag: "current_temp" }
value: 10
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: 15
training_step: 2
wall_time_secs: 10.0
}
metric_values { name { tag: "optional_metric" } value: 33
training_step: 20
wall_time_secs: 2.0
}
sessions {
name: "session_1"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 10
training_step: 1
wall_time_secs: 1.0
}
metric_values {
name { tag: "delta_temp" }
value: 15
training_step: 2
wall_time_secs: 10.0
}
metric_values {
name { tag: "optional_metric" }
value: 33
training_step: 20
wall_time_secs: 2.0
}
}
}
session_groups {
name: "group_2"
hparams { key: "bool_hparam" value { bool_value: false } }
hparams { key: "final_temp" value { number_value: 100.0 } }
hparams { key: "initial_temp" value { number_value: 280.0 } }
hparams { key: "string_hparam" value { string_value: "AAAAA"}}
metric_values {
name { tag: "current_temp" }
value: 51.0
training_step: 1
wall_time_secs: 1.0
}
metric_values {
name { tag: "delta_temp" }
value: 44.5
training_step: 2
wall_time_secs: 10.3333333
}
sessions {
name: "session_2"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 100
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: 150
training_step: 3
wall_time_secs: 11.0
}
}
sessions {
name: "session_3"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_FAILURE
metric_values {
name { tag: "current_temp" }
value: 1.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: 1.5
training_step: 2
wall_time_secs: 10.0
}
}
sessions {
name: "session_5"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 52.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: -18
training_step: 2
wall_time_secs: 10.0
}
}
}
session_groups {
name: "group_3"
hparams { key: "bool_hparam" value { bool_value: true } }
hparams { key: "final_temp" value { number_value: 0.000012 } }
hparams { key: "initial_temp" value { number_value: 300.0 } }
hparams { key: "string_hparam" value { string_value: "a string_3"}}
hparams {
key: 'optional_string_hparam' value { string_value: 'BB' }
}
metric_values {
name { tag: "current_temp" }
value: 101.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: -15100000.0
training_step: 2
wall_time_secs: 10.0
}
sessions {
name: "session_4"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_UNKNOWN
metric_values {
name { tag: "current_temp" }
value: 101.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: -151000000.0
training_step: 2
wall_time_secs: 10.0
}
}
}
total_size: 3
"""
EXPECTED_LATEX = r"""\begin{table}[tbp]
\begin{tabular}{llllllll}
initial\_temp & final\_temp & string\_hparam & bool\_hparam & optional\_string\_hparam & current\_temp & delta\_temp & optional\_metric \\ \hline
$270$ & $150$ & a string & $1$ & & $10$ & $15$ & $33$ \\
$280$ & $100$ & AAAAA & $0$ & & $51$ & $44.5$ & - \\
$300$ & $1.2\cdot 10^{-5}$ & a string\_3 & $1$ & BB & $101$ & $-1.51\cdot 10^{7}$ & - \\
\hline
\end{tabular}
\end{table}
"""
EXPECTED_CSV = """initial_temp,final_temp,string_hparam,bool_hparam,optional_string_hparam,current_temp,delta_temp,optional_metric\r
270.0,150.0,a string,True,,10.0,15.0,33.0\r
280.0,100.0,AAAAA,False,,51.0,44.5,\r
300.0,1.2e-05,a string_3,True,BB,101.0,-15100000.0,\r
"""
class DownloadDataTest(tf.test.TestCase):
def setUp(self):
self._mock_multiplexer = mock.create_autospec(
plugin_event_multiplexer.EventMultiplexer
)
self._mock_tb_context = base_plugin.TBContext(
multiplexer=self._mock_multiplexer
)
def _run_handler(self, experiment, session_groups, response_format):
experiment_proto = text_format.Merge(experiment, api_pb2.Experiment())
session_groups_proto = text_format.Merge(
session_groups, api_pb2.ListSessionGroupsResponse()
)
num_columns = len(experiment_proto.hparam_infos) + len(
experiment_proto.metric_infos
)
handler = download_data.Handler(
backend_context.Context(self._mock_tb_context),
experiment_proto,
session_groups_proto,
response_format,
[True] * num_columns,
)
return handler.run()
def test_csv(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.CSV
)
self.assertEqual("text/csv", mime_type)
self.assertEqual(EXPECTED_CSV, body)
def test_latex(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.LATEX
)
self.assertEqual("application/x-latex", mime_type)
self.assertEqual(EXPECTED_LATEX, body)
def test_json(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.JSON
)
self.assertEqual("application/json", mime_type)
expected_result = {
"header": [
"initial_temp",
"final_temp",
"string_hparam",
"bool_hparam",
"optional_string_hparam",
"current_temp",
"delta_temp",
"optional_metric",
],
"rows": [
[270.0, 150.0, "a string", True, "", 10.0, 15.0, 33.0],
[280.0, 100.0, "AAAAA", False, "", 51.0, 44.5, None],
[
300.0,
1.2e-05,
"a string_3",
True,
"BB",
101.0,
-15100000.0,
None,
],
],
}
self.assertEqual(expected_result, body)
if __name__ == "__main__":
tf.test.main()
# api2db.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# ############################################################################
# ########## Libraries #############
# ##################################
# Standard library
import logging
from os import path
# 3rd party modules
import arrow
from isogeo_pysdk import Isogeo
# Django project
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from isogeo_notify.models import Metadata, Workgroup
# ############################################################################
# ########## Globals ##############
# #################################
# logger = logging.getLogger("ElPaso")
# ############################################################################
# ########### Classes #############
# #################################
class Command(BaseCommand):
args = '<foo bar ...>'
help = 'our help string comes here'
def _update_db(self):
"""Update metadata list from API."""
# get stored metadata
db_mds = Metadata.objects.all()
db_wgs = Workgroup.objects.all()
# connect to isogeo
isogeo = Isogeo(client_id=settings.ISOGEO_CLIENT_ID,
client_secret=settings.ISOGEO_CLIENT_SECRET,
lang="fr")
token = isogeo.connect()
search = isogeo.search(token,
# page_size=10,
order_by="modified",
# whole_share=0,
# sub_resources=["events"]
)
# tags
tags = search.get("tags")
for tag in tags:
if tag.startswith("owner"):
new_owner = Workgroup(isogeo_uuid=tag[6:-1],
label=tags.get(tag))
new_owner.save()
# metadatas
# for md in search.get("results"):
# try:
# new_md = Metadata(isogeo_id=md.get("_id"),
# title=md.get("title", "No title"),
# name=md.get("name"),
# abstract=md.get("abstract"),
# md_dt_crea=md.get("_created"),
# md_dt_update=md.get("_modified"),
# rs_dt_crea=md.get("created"),
# rs_dt_update=md.get("modified"),
# source=True)
# new_md.save()
# logging.info("Metadata added")
# except IntegrityError:
# # in case of duplicated offer
# logging.error("Metadata already existed")
# continue
logging.info("{} metadata found".format(len(search.get("results", []))))
def handle(self, *args, **options):
self._update_db()
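# Usage sketch (assumed): as a Django management command, the updater above
# would be invoked by the module's file name once ISOGEO_CLIENT_ID and
# ISOGEO_CLIENT_SECRET are configured in settings:
#
#     python manage.py api2db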
# uploadhandler.py
"""
Upload handlers to test the upload API.
"""
from django.core.files.uploadhandler import (
FileUploadHandler, StopUpload, TemporaryFileUploadHandler,
)
class QuotaUploadHandler(FileUploadHandler):
"""
This test upload handler terminates the connection if more than a quota
(5MB) is uploaded.
"""
QUOTA = 5 * 2 ** 20 # 5 MB
def __init__(self, request=None):
super().__init__(request)
self.total_upload = 0
def receive_data_chunk(self, raw_data, start):
self.total_upload += len(raw_data)
if self.total_upload >= self.QUOTA:
raise StopUpload(connection_reset=True)
return raw_data
def file_complete(self, file_size):
return None
class StopUploadTemporaryFileHandler(TemporaryFileUploadHandler):
"""A handler that raises a StopUpload exception."""
def receive_data_chunk(self, raw_data, start):
raise StopUpload()
class CustomUploadError(Exception):
pass
class ErroringUploadHandler(FileUploadHandler):
"""A handler that raises an exception."""<|fim▁hole|> raise CustomUploadError("Oops!")<|fim▁end|>
|
def receive_data_chunk(self, raw_data, start):
|
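# Wiring sketch (assumed, not part of the original module): Django selects
# upload handlers per request, so a view can install QuotaUploadHandler
# before the request body is parsed. csrf_exempt is needed because the CSRF
# middleware would otherwise access request.POST first and freeze the
# handler list.
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt
def quota_upload_view(request):
    request.upload_handlers.insert(0, QuotaUploadHandler(request))
    files = request.FILES  # parsing happens here; StopUpload aborts past 5 MB
    return HttpResponse("received %d file(s)" % len(files))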
// Utils.ts
module Utils {
export class Generator {
public static newGuid() {
var guid = "";
for (var i = 1; i <= 32; i++) {
var n = Math.floor(Math.random() * 16.0).toString(16);
guid += n;
if ((i == 8) || (i == 12) || (i == 16) || (i == 20))
guid += "-";
}
return guid;
}
}
export class Common {
public static isInt(val: string): boolean {
var regInt = /^-?[0-9]+(\.0*)?$/;
return regInt.test(val);
}
public static resolveAlertMsg(val: string, isError: boolean): string {
var result;
var msgFormat;
if (isError) {
msgFormat = '{0}[[[失败]]]';
} else {
msgFormat = '{0}[[[成功]]]';
}
switch (val) {
case 'Submit':
case 'ReSubmit':
result = '[[[提交]]]';
break;
case 'Save':
result = '[[[保存]]]';
break;
case 'Return':
result = '[[[退回]]]';
break;
case 'Approve':
result = '[[[审批]]]';
break;
case 'Decline':
result = '[[[拒绝]]]';
break;
case 'Recall':
result = '[[[撤回]]]';
break;
default:
result = '[[[操作]]]';
}
return Common.format.call(this, msgFormat, result);
}
public static isUserAction(from: string): boolean {
var isUserAction = false;
if (from
&& from == 'useraction') {
isUserAction = true;
}
return isUserAction;
}
public static getParameterByName(name: string): string {
name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]");
var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"),
results = regex.exec(location.hash ? location.hash : location.search);
return results === null ? "" : decodeURIComponent(results[1].replace(/\+/g, " "));
}
public static filterDefaultDate(val: Date): Date {
var result = null;
var valMoment = moment(val);
if (valMoment.isValid()
&& valMoment.year() != 1900) {
result = val;
}
return result;
}
public static format(): string {
if (arguments.length == 0)
return null;
var str = arguments[0];
for (var i = 1; i < arguments.length; i++) {
var re = new RegExp('\\{' + (i - 1) + '\\}', 'gm');
str = str.replace(re, arguments[i]);
}
return str;
}
public static GetQueryString(key: string): string {
var result = "";
if (document.URL.indexOf("?") > 0) {
var query = document.URL.substr(document.URL.indexOf("?") + 1).split("&");
for (var i = 0, len = query.length; i < len; i++) {
var keyVal = query[i].split("=");
if (keyVal[0].toLowerCase() == key.toLowerCase()) {
result = keyVal[1];
break;
}
}
}
return result;
}
}
/// Precise floating-point arithmetic in JavaScript
/// Author: Stephen.Wang
/// Date: 2014-07-09
export class caculator {
// Addition
public static plus(a: number, b: number): number {
if (!a) {
a = 0;
};
if (!b) {
b = 0;
}
var s1 = a.toString(), s2 = b.toString(),
m1 = s1.indexOf(".") > 0 ? s1.length - s1.indexOf(".") - 1 : 0,
m2 = s2.indexOf(".") > 0 ? s2.length - s2.indexOf(".") - 1 : 0,
m = Math.pow(10, Math.max(m1, m2));
return (caculator.multiply(a, m) + caculator.multiply(b, m)) / m;
}
// Multiplication
public static multiply(a, b): number {
if (!a) {
a = 0;
};
if (!b) {
b = 0;
}
var s1 = a.toString(), s2 = b.toString(),
m1 = s1.indexOf(".") > 0 ? s1.length - s1.indexOf(".") - 1 : 0,
m2 = s2.indexOf(".") > 0 ? s2.length - s2.indexOf(".") - 1 : 0;
return Number(s1.replace(".", "")) * Number(s2.replace(".", "")) / Math.pow(10, m1 + m2);
}
/// Subtraction
public static subtract(a, b): number {
return caculator.plus(a, -b);
}
/// Division
public static division(a, b): number {
//return caculator.multiply(a, 1 / b);
return a * 1 / b;
}
}
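// Usage sketch (illustrative, not in the original source): the string-scaling
// trick above sidesteps binary floating-point drift, e.g.
//   0.1 + 0.2                       // 0.30000000000000004
//   caculator.plus(0.1, 0.2)        // 0.3
//   caculator.multiply(1.005, 100)  // 100.5, where plain 1.005 * 100 gives 100.49999999999999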
export class ServiceURI {
// Web API address
public static Address() {
return "http://172.24.130.43:10083/";
// localhost (unreachable; swap with the return above to use)
return "http://localhost:10083/";
}
// Web server address
public static WebAddress() {
return "http://172.24.130.43:10082/";
// localhost (unreachable; swap with the return above to use)
return "http://localhost:10082/";
}
// Base framework API address
public static FrameAddress() {
return "http://172.24.130.43:10080/";
}
// Base framework web address
public static FrameWebAddress() {
return "http://172.24.130.43:10081/";
}
// Attachment server address
public static AttachmentAddress() {
return "http://1.1.2.5:9000/PMT/upload?action=download&&fileName=";
}
public static AppUri = window["AppUri"];
public static ApiDelegate = ServiceURI.AppUri + "ApiDelegate.ashx";
}
export class Constants {
public static ApiDelegate = Utils.ServiceURI.ApiDelegate;
public static BaseUri = Utils.ServiceURI.FrameWebAddress();
public static ServiceUri = Utils.ServiceURI.FrameAddress();
}
}
declare function escape(str: any);
// mod.rs
//! Contains everything related to the interface between glium and the OpenGL implementation.
use gl;
use libc;
use std::env;
use std::mem;
use std::ptr;
use std::borrow::Cow;
use std::collections::HashMap;
use std::cell::{Cell, RefCell, RefMut};
use std::marker::PhantomData;
use std::ffi::CStr;
use std::rc::Rc;
use GliumCreationError;
use SwapBuffersError;
use CapabilitiesSource;
use ContextExt;
use backend::Backend;
use version;
use version::Api;
use version::Version;
use fbo;
use ops;
use sampler_object;
use texture;
use uniforms;
use vertex_array_object;
pub use self::capabilities::{ReleaseBehavior, Capabilities};
pub use self::extensions::ExtensionsList;
pub use self::state::GlState;
mod capabilities;
mod extensions;
mod state;
/// Stores the state and information required for glium to execute commands. Most public glium
/// functions require passing a `Rc<Context>`.
pub struct Context {
/// Contains the pointers to OpenGL functions.
gl: gl::Gl,
/// The current state of the OpenGL state machine. Contains for example which buffer is bound
/// to which bind point, whether depth testing is activated, etc.
state: RefCell<GlState>,
/// Version of OpenGL of the backend.
version: Version,
/// Tells whether or not the backend supports each extension.
extensions: ExtensionsList,
/// Constants defined by the backend and retrieved at initialization. For example, number
/// of texture units, maximum size of the viewport, etc.
capabilities: Capabilities,
/// Glue between glium and the code that handles windowing. Contains functions that allows
/// you to swap buffers, retrieve the size of the framebuffer, etc.
backend: RefCell<Box<Backend>>,
/// Whether or not glium must check that the OpenGL context is the current one before each
/// call.
check_current_context: bool,
/// Whether or not errors triggered by ARB_debug_output (and similar extensions) should be
/// reported to the user (by panicking). This must be set to `false` in some situations,
/// like compiling/linking shaders.
report_debug_output_errors: Cell<bool>,
/// We maintain a cache of FBOs.
/// The `Option` is here in order to destroy the container. It must be filled at all times
/// in a normal situation.
framebuffer_objects: Option<fbo::FramebuffersContainer>,
/// We maintain a list of vertex array objects.
vertex_array_objects: vertex_array_object::VertexAttributesSystem,
/// We maintain a list of samplers for each possible behavior.
samplers: RefCell<HashMap<uniforms::SamplerBehavior, sampler_object::SamplerObject>>,
/// List of texture handles that are resident. We need to call `MakeTextureHandleResidentARB`
/// when rebuilding the context.
resident_texture_handles: RefCell<Vec<gl::types::GLuint64>>,
/// List of images handles that are resident. We need to call `MakeImageHandleResidentARB`
/// when rebuilding the context.
resident_image_handles: RefCell<Vec<(gl::types::GLuint64, gl::types::GLenum)>>,
}
/// This struct is a guard that is returned when you want to access the OpenGL backend.
pub struct CommandContext<'a> {
/// Source of OpenGL function pointers.
pub gl: &'a gl::Gl,
/// Refers to the state of the OpenGL backend. Maintained between multiple calls.
/// **Must** be synchronized with the real state of the backend.
pub state: RefMut<'a, GlState>,
/// Version of the backend.
pub version: &'a Version,
/// Extensions supported by the backend.
pub extensions: &'a ExtensionsList,
/// Capabilities of the backend.
pub capabilities: &'a Capabilities,
/// Whether or not errors triggered by ARB_debug_output (and similar extensions) should be
/// reported to the user (by panicking).
pub report_debug_output_errors: &'a Cell<bool>,
/// The list of vertex array objects.
pub vertex_array_objects: &'a vertex_array_object::VertexAttributesSystem,
/// The list of framebuffer objects.
pub framebuffer_objects: &'a fbo::FramebuffersContainer,
/// The list of samplers.
pub samplers: RefMut<'a, HashMap<uniforms::SamplerBehavior, sampler_object::SamplerObject>>,
/// List of texture handles that need to be made resident.
pub resident_texture_handles: RefMut<'a, Vec<gl::types::GLuint64>>,
/// List of image handles and their access that need to be made resident.
pub resident_image_handles: RefMut<'a, Vec<(gl::types::GLuint64, gl::types::GLenum)>>,
/// This marker is here to prevent `CommandContext` from implementing `Send`
// TODO: use this when possible
//impl<'a, 'b> !Send for CommandContext<'a, 'b> {}
marker: PhantomData<*mut u8>,
}
impl Context {
/// Builds a new context.
///
/// The `check_current_context` parameter tells the context whether it should check
/// if the backend's OpenGL context is the current one before each OpenGL operation.
///
/// If you pass `false`, you must ensure that no other OpenGL context is going to be made
/// current in the same thread as this context. Passing `true` makes things safe but
/// is slightly slower.
///
/// The OpenGL context must be newly-created. If you make modifications to the context before
/// passing it to this function, glium's state cache may mismatch the actual one.
///
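// For reference, applications rarely call `Context::new` directly; with the
// glutin backend of the same era the usual entry point is roughly
// (assumed sketch, the exact glue lives in the backend crates):
//
//     let display = glutin::WindowBuilder::new().build_glium().unwrap();
//
// which constructs the `Rc<Context>` internally.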
pub unsafe fn new<B, E>(backend: B, check_current_context: bool)
-> Result<Rc<Context>, GliumCreationError<E>>
where B: Backend + 'static
{
backend.make_current();
let gl = gl::Gl::load_with(|symbol| backend.get_proc_address(symbol));
let gl_state: RefCell<GlState> = RefCell::new(Default::default());
let version = version::get_gl_version(&gl);
let extensions = extensions::get_extensions(&gl, &version);
let capabilities = capabilities::get_capabilities(&gl, &version, &extensions);
let report_debug_output_errors = Cell::new(true);
let vertex_array_objects = vertex_array_object::VertexAttributesSystem::new();
let framebuffer_objects = fbo::FramebuffersContainer::new();
let samplers = RefCell::new(HashMap::with_capacity(16));
let resident_texture_handles = RefCell::new(Vec::new());
let resident_image_handles = RefCell::new(Vec::new());
// checking whether the backend supports glium
// TODO: do this more properly
{
let mut ctxt = CommandContext {
gl: &gl,
state: gl_state.borrow_mut(),
version: &version,
extensions: &extensions,
capabilities: &capabilities,
report_debug_output_errors: &report_debug_output_errors,
vertex_array_objects: &vertex_array_objects,
framebuffer_objects: &framebuffer_objects,
samplers: samplers.borrow_mut(),
resident_texture_handles: resident_texture_handles.borrow_mut(),
resident_image_handles: resident_image_handles.borrow_mut(),
marker: PhantomData,
};
try!(check_gl_compatibility(&mut ctxt));
}
let context = Rc::new(Context {
gl: gl,
state: gl_state,
version: version,
extensions: extensions,
capabilities: capabilities,
report_debug_output_errors: report_debug_output_errors,
backend: RefCell::new(Box::new(backend)),
check_current_context: check_current_context,
framebuffer_objects: Some(framebuffer_objects),
vertex_array_objects: vertex_array_objects,
samplers: samplers,
resident_texture_handles: resident_texture_handles,
resident_image_handles: resident_image_handles,
});
init_debug_callback(&context);
// making sure that an error wasn't triggered during initialization
{
let mut ctxt = context.make_current();
if ::get_gl_error(&mut ctxt).is_some() {
println!("glium has triggered an OpenGL error during initialization. Please report \
this error: https://github.com/tomaka/glium/issues");
}
/*assert!(::get_gl_error(&mut ctxt).is_none(),
"glium has triggered an OpenGL error during initialization. Please report \
this error: https://github.com/tomaka/glium/issues");*/
}
Ok(context)
}
/// Calls `get_framebuffer_dimensions` on the backend object stored by this context.
#[inline]
pub fn get_framebuffer_dimensions(&self) -> (u32, u32) {
self.backend.borrow().get_framebuffer_dimensions()
}
/// Changes the OpenGL context associated with this context.
///
/// The new context **must** have lists shared with the old one.
pub unsafe fn rebuild<B, E>(&self, new_backend: B)
-> Result<(), GliumCreationError<E>>
where B: Backend + 'static
{
// framebuffer objects and vertex array objects aren't shared,
// so we have to destroy them
{
let mut ctxt = self.make_current();
fbo::FramebuffersContainer::purge_all(&mut ctxt);
vertex_array_object::VertexAttributesSystem::purge_all(&mut ctxt);
}
new_backend.make_current();
*self.state.borrow_mut() = Default::default();
// FIXME: verify version, capabilities and extensions
*self.backend.borrow_mut() = Box::new(new_backend);
// making textures resident
let textures = self.resident_texture_handles.borrow();
for &texture in textures.iter() {
self.gl.MakeTextureHandleResidentARB(texture);
}
// making images resident
let images = self.resident_image_handles.borrow();
for &(image, access) in images.iter() {
self.gl.MakeImageHandleResidentARB(image, access);
}
Ok(())
}
/// Swaps the buffers in the backend.
pub fn swap_buffers(&self) -> Result<(), SwapBuffersError> {
let mut state = self.state.borrow_mut();
if state.lost_context {
return Err(SwapBuffersError::ContextLost);
}
let backend = self.backend.borrow();
if self.check_current_context {
if !backend.is_current() {
unsafe { backend.make_current() };
}
}
// swapping
let err = backend.swap_buffers();
if let Err(SwapBuffersError::ContextLost) = err {
state.lost_context = true;
}
err
}
/// DEPRECATED. Use `get_opengl_version` instead.
#[inline]
pub fn get_version(&self) -> &Version {
&self.version
}
/// Returns the OpenGL version detected by this context.
#[inline]
pub fn get_opengl_version(&self) -> &Version {
&self.version
}
/// Returns the GLSL version guaranteed to be supported.
#[inline]
pub fn get_supported_glsl_version(&self) -> Version {
version::get_supported_glsl_version(self.get_version())
}
/// Returns true if the given GLSL version is supported.
#[inline]
pub fn is_glsl_version_supported(&self, version: &Version) -> bool {
self.capabilities().supported_glsl_versions.iter().find(|&v| v == version).is_some()
}
/// Returns true if out-of-bound buffer access from the GPU side (inside a program) cannot
/// result in a crash.
///
/// You should take extra care if `is_robust` returns false.
#[inline]
pub fn is_robust(&self) -> bool {
self.capabilities().robustness
}
/// Returns true if a context loss is possible.
#[inline]
pub fn is_context_loss_possible(&self) -> bool {
self.capabilities().can_lose_context
}
/// Returns true if the context has been lost and needs to be recreated.
///
/// # Implementation
///
/// If it has been determined that the context has been lost before, then the function
/// immediately returns true. Otherwise, calls `glGetGraphicsResetStatus`. If this function
/// is not available, returns false.
pub fn is_context_lost(&self) -> bool {
if self.state.borrow().lost_context {
return true;
}
let mut ctxt = self.make_current();
let lost = if ctxt.version >= &Version(Api::Gl, 4, 5) ||
ctxt.version >= &Version(Api::GlEs, 3, 2) ||
ctxt.extensions.gl_khr_robustness
{
unsafe { ctxt.gl.GetGraphicsResetStatus() != gl::NO_ERROR }
} else if ctxt.extensions.gl_ext_robustness {
unsafe { ctxt.gl.GetGraphicsResetStatusEXT() != gl::NO_ERROR }
} else if ctxt.extensions.gl_arb_robustness {
unsafe { ctxt.gl.GetGraphicsResetStatusARB() != gl::NO_ERROR }
} else {
false
};
if lost { ctxt.state.lost_context = true; }
lost
}
/// Returns the behavior when the current OpenGL context is changed.
///
/// The most common value is `Flush`. In order to get `None` you must explicitly request it
/// during creation.
#[inline]
pub fn get_release_behavior(&self) -> ReleaseBehavior {
self.capabilities().release_behavior
}
/// Returns the maximum value that can be used for anisotropic filtering, or `None`
/// if the hardware doesn't support it.
#[inline]
pub fn get_max_anisotropy_support(&self) -> Option<u16> {
self.capabilities().max_texture_max_anisotropy.map(|v| v as u16)
}
/// Returns the maximum dimensions of the viewport.
///
/// Glium will panic if you request a larger viewport than this when drawing.
#[inline]
pub fn get_max_viewport_dimensions(&self) -> (u32, u32) {
let d = self.capabilities().max_viewport_dims;
(d.0 as u32, d.1 as u32)
}
/// Releases the shader compiler, indicating that no new programs will be created for a while.
///
/// This method is a no-op if it's not available in the implementation.
pub fn release_shader_compiler(&self) {
unsafe {
let ctxt = self.make_current();
if ctxt.version >= &Version(Api::GlEs, 2, 0) ||
ctxt.version >= &Version(Api::Gl, 4, 1)
{
if !ctxt.capabilities.supported_glsl_versions.is_empty() {
ctxt.gl.ReleaseShaderCompiler();
}
}
}
}
/// Returns an estimate of the amount of video memory available in bytes.
///
/// Returns `None` if no estimate is available.
pub fn get_free_video_memory(&self) -> Option<usize> {
unsafe {
let ctxt = self.make_current();
let mut value: [gl::types::GLint; 4] = mem::uninitialized();
if ctxt.extensions.gl_nvx_gpu_memory_info {
ctxt.gl.GetIntegerv(gl::GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX,
&mut value[0]);
Some(value[0] as usize * 1024)
} else if ctxt.extensions.gl_ati_meminfo {
ctxt.gl.GetIntegerv(gl::TEXTURE_FREE_MEMORY_ATI, &mut value[0]);
Some(value[0] as usize * 1024)
} else {
return None;
}
}
}
/// Reads the content of the front buffer.
///
/// You will only see the data that has finished being drawn.
///
/// This function can return any type that implements `Texture2dData`.
///
/// ## Example
///
/// ```no_run
/// # extern crate glium;
/// # extern crate glutin;
/// # fn main() {
/// # let display: glium::Display = unsafe { ::std::mem::uninitialized() };
/// let pixels: Vec<Vec<(u8, u8, u8, u8)>> = display.read_front_buffer();
/// # }
/// ```
pub fn read_front_buffer<T>(&self) -> T
where T: texture::Texture2dDataSink<(u8, u8, u8, u8)>
{
let mut ctxt = self.make_current();
let dimensions = self.get_framebuffer_dimensions();
let rect = ::Rect { left: 0, bottom: 0, width: dimensions.0, height: dimensions.1 };
let mut data = Vec::with_capacity(0);
ops::read(&mut ctxt, ops::Source::DefaultFramebuffer(gl::FRONT_LEFT), &rect, &mut data);
T::from_raw(Cow::Owned(data), dimensions.0, dimensions.1)
}
/// Execute an arbitrary closure with the OpenGL context active. Useful if another
/// component needs to directly manipulate OpenGL state.
///
/// **If `action` manipulates any OpenGL state, it must be restored before `action`
/// completes.**
#[inline]
pub unsafe fn exec_in_context<'a, T, F>(&self, action: F) -> T
where T: Send + 'static,
F: FnOnce() -> T + 'a
{
let _ctxt = self.make_current();
action()
}
/// Asserts that there are no OpenGL errors pending.
///
/// This function should be used in tests.
pub fn assert_no_error(&self, user_msg: Option<&str>) {
let mut ctxt = self.make_current();
match (::get_gl_error(&mut ctxt), user_msg) {
(Some(msg), None) => panic!("{}", msg),
(Some(msg), Some(user_msg)) => panic!("{} : {}", user_msg, msg),
(None, _) => ()
};
}
/// DEPRECATED. Renamed `finish`.
#[inline]
pub fn synchronize(&self) {
self.finish();
}
/// Calls `glFinish()`. This waits until all the previously issued commands have finished
/// being executed.
///
/// When you execute OpenGL functions, they are not executed immediately. Instead they are
/// put in a queue. This function flushes this queue, then waits until all commands
/// have finished being executed.
///
/// You normally don't need to call this function manually, except for debugging purposes.
#[inline]
pub fn finish(&self) {
let ctxt = self.make_current();
unsafe { ctxt.gl.Finish(); }
}
/// Calls `glFlush()`. This starts executing the commands that you have issued if it is not
/// yet the case.
///
/// When you execute OpenGL functions, they are not executed immediately. Instead they are
/// put in a queue. This function flushes this queue so that commands start being executed.
///
/// You normally don't need to call this function manually. Swapping buffers automatically
/// flushes the queue. This function can be useful if you want to benchmark the time it
/// takes from your OpenGL driver to process commands.
#[inline]
pub fn flush(&self) {
let ctxt = self.make_current();
unsafe { ctxt.gl.Flush(); }
}
/// Inserts a debugging string in the commands queue. If you use an OpenGL debugger, you will
/// be able to see that string.
///
/// This is helpful to understand where you are when you have big applications.
///
/// Returns `Err` if the backend doesn't support this functionality. You can choose whether
/// to call `.unwrap()` if you want to make sure that it works, or `.ok()` if you don't care.
pub fn insert_debug_marker(&self, marker: &str) -> Result<(), ()> {
let ctxt = self.make_current();
if ctxt.extensions.gl_gremedy_string_marker {
let marker = marker.as_bytes();
unsafe { ctxt.gl.StringMarkerGREMEDY(marker.len() as gl::types::GLsizei,
marker.as_ptr() as *const _) };
Ok(())
} else if ctxt.extensions.gl_ext_debug_marker {
let marker = marker.as_bytes();
unsafe { ctxt.gl.InsertEventMarkerEXT(marker.len() as gl::types::GLsizei,
marker.as_ptr() as *const _) };
Ok(())
} else {
Err(())
}
}
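// Illustrative call (assumed usage, not from the original source):
//
//     context.insert_debug_marker("draw UI pass").ok();
//
// `.ok()` discards the `Err(())` returned on backends without a marker
// extension, matching the advice in the doc comment above.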
/// Same as `insert_debug_marker`, except that if you don't compile with `debug_assertions`
/// it is a no-op and returns `Ok`.
#[inline]
pub fn debug_insert_debug_marker(&self, marker: &str) -> Result<(), ()> {
if cfg!(debug_assertions) {
self.insert_debug_marker(marker)
} else {
Ok(())
}
}
}
impl ContextExt for Context {
#[inline]
fn set_report_debug_output_errors(&self, value: bool) {
self.report_debug_output_errors.set(value);
}
fn make_current(&self) -> CommandContext {
if self.check_current_context {
let backend = self.backend.borrow();
if !backend.is_current() {
unsafe { backend.make_current() };
}
}
CommandContext {
gl: &self.gl,
state: self.state.borrow_mut(),
version: &self.version,
extensions: &self.extensions,
capabilities: &self.capabilities,
report_debug_output_errors: &self.report_debug_output_errors,
vertex_array_objects: &self.vertex_array_objects,
framebuffer_objects: self.framebuffer_objects.as_ref().unwrap(),
samplers: self.samplers.borrow_mut(),
resident_texture_handles: self.resident_texture_handles.borrow_mut(),
resident_image_handles: self.resident_image_handles.borrow_mut(),
marker: PhantomData,
}
}
#[inline]
fn capabilities(&self) -> &Capabilities {
&self.capabilities
}
}
impl CapabilitiesSource for Context {
#[inline]
fn get_version(&self) -> &Version {
&self.version
}
#[inline]
fn get_extensions(&self) -> &ExtensionsList {
&self.extensions
}
#[inline]
fn get_capabilities(&self) -> &Capabilities {
&self.capabilities
}
}
impl Drop for Context {
fn drop(&mut self) {
unsafe {
// this is the code of make_current duplicated here because we can't borrow
// `self` twice
if self.check_current_context {
let backend = self.backend.borrow();
if !backend.is_current() {
backend.make_current();
}
}
let mut ctxt = CommandContext {
gl: &self.gl,
state: self.state.borrow_mut(),
version: &self.version,
extensions: &self.extensions,
capabilities: &self.capabilities,
report_debug_output_errors: &self.report_debug_output_errors,
vertex_array_objects: &self.vertex_array_objects,
framebuffer_objects: self.framebuffer_objects.as_ref().unwrap(),
samplers: self.samplers.borrow_mut(),
resident_texture_handles: self.resident_texture_handles.borrow_mut(),
resident_image_handles: self.resident_image_handles.borrow_mut(),
marker: PhantomData,
};
fbo::FramebuffersContainer::cleanup(&mut ctxt);
vertex_array_object::VertexAttributesSystem::cleanup(&mut ctxt);
for (_, s) in mem::replace(&mut *ctxt.samplers, HashMap::with_capacity(0)) {
s.destroy(&mut ctxt);
}
// disabling callback
if ctxt.state.enabled_debug_output != Some(false) {
if ctxt.version >= &Version(Api::Gl, 4,5) || ctxt.extensions.gl_khr_debug {
ctxt.gl.Disable(gl::DEBUG_OUTPUT);
} else if ctxt.extensions.gl_arb_debug_output {
ctxt.gl.DebugMessageCallbackARB(mem::transmute(0usize),
ptr::null());
}
ctxt.state.enabled_debug_output = Some(false);
ctxt.gl.Finish();
}
}
}
}
impl<'a> CapabilitiesSource for CommandContext<'a> {
#[inline]
fn get_version(&self) -> &Version {
self.version
}
#[inline]
fn get_extensions(&self) -> &ExtensionsList {
self.extensions
}
#[inline]
fn get_capabilities(&self) -> &Capabilities {
self.capabilities
}
}
/// Checks whether the backend supports glium. Returns an `Err` if it doesn't.
fn check_gl_compatibility<T>(ctxt: &mut CommandContext) -> Result<(), GliumCreationError<T>> {
let mut result = Vec::with_capacity(0);
if !(ctxt.version >= &Version(Api::Gl, 1, 5)) &&
!(ctxt.version >= &Version(Api::GlEs, 2, 0)) &&
(!ctxt.extensions.gl_arb_vertex_buffer_object || !ctxt.extensions.gl_arb_map_buffer_range)
{
result.push("OpenGL implementation doesn't support buffer objects");
}
if !(ctxt.version >= &Version(Api::Gl, 2, 0)) &&
!(ctxt.version >= &Version(Api::GlEs, 2, 0)) &&
(!ctxt.extensions.gl_arb_shader_objects ||
!ctxt.extensions.gl_arb_vertex_shader || !ctxt.extensions.gl_arb_fragment_shader)
{
result.push("OpenGL implementation doesn't support vertex/fragment shaders");
}
if !ctxt.extensions.gl_ext_framebuffer_object && !(ctxt.version >= &Version(Api::Gl, 3, 0)) &&
!(ctxt.version >= &Version(Api::GlEs, 2, 0)) && !ctxt.extensions.gl_arb_framebuffer_object
{
result.push("OpenGL implementation doesn't support framebuffers");
}
if !ctxt.extensions.gl_ext_framebuffer_blit && !(ctxt.version >= &Version(Api::Gl, 3, 0)) &&
!(ctxt.version >= &Version(Api::GlEs, 2, 0))
{
result.push("OpenGL implementation doesn't support blitting framebuffers");
}
if result.len() == 0 {
Ok(())
} else {
Err(GliumCreationError::IncompatibleOpenGl(result.connect("\n")))
}
}
/// Initializes `GL_KHR_debug`, `GL_ARB_debug`, or a similar extension so that the debug output
/// is reported.
fn init_debug_callback(context: &Rc<Context>) {
if !cfg!(debug_assertions) {
return;
}
// TODO: remove this
if env::var("GLIUM_DISABLE_DEBUG_OUTPUT").is_ok() {
return;
}
// this is the C callback
extern "system" fn callback_wrapper(source: gl::types::GLenum, ty: gl::types::GLenum,
id: gl::types::GLuint, severity: gl::types::GLenum,
_length: gl::types::GLsizei,
message: *const gl::types::GLchar,
user_param: *mut libc::c_void)
{
let user_param = user_param as *const Context;
let user_param: &Context = unsafe { mem::transmute(user_param) };
if (severity == gl::DEBUG_SEVERITY_HIGH || severity == gl::DEBUG_SEVERITY_MEDIUM) &&
(ty == gl::DEBUG_TYPE_ERROR || ty == gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR ||
ty == gl::DEBUG_TYPE_PORTABILITY || ty == gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR)
{
if user_param.report_debug_output_errors.get() {
// reporting
let message = unsafe {
String::from_utf8(CStr::from_ptr(message).to_bytes().to_vec()).unwrap()
};
panic!("Debug message with high or medium severity: `{}`.\n\
Please report this error: https://github.com/tomaka/glium/issues",
message);
}
}
}
struct ContextRawPtr(*const Context);
unsafe impl Send for ContextRawPtr {}
let context_raw_ptr = ContextRawPtr(&**context);
unsafe {
let mut ctxt = context.make_current();
if ctxt.version >= &Version(Api::Gl, 4,5) || ctxt.version >= &Version(Api::GlEs, 3, 2) ||
ctxt.extensions.gl_khr_debug || ctxt.extensions.gl_arb_debug_output
{
if ctxt.state.enabled_debug_output_synchronous != true {
ctxt.gl.Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);
ctxt.state.enabled_debug_output_synchronous = true;
}
if ctxt.version >= &Version(Api::Gl, 4, 5) ||
ctxt.version >= &Version(Api::GlEs, 3, 2) ||
(ctxt.version >= &Version(Api::Gl, 1, 0) && ctxt.extensions.gl_khr_debug)
{
ctxt.gl.DebugMessageCallback(callback_wrapper, context_raw_ptr.0
as *const libc::c_void);
ctxt.gl.DebugMessageControl(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE, 0,
ptr::null(), gl::TRUE);
if ctxt.state.enabled_debug_output != Some(true) {
ctxt.gl.Enable(gl::DEBUG_OUTPUT);
ctxt.state.enabled_debug_output = Some(true);
}
} else if ctxt.version >= &Version(Api::GlEs, 2, 0) &&
ctxt.extensions.gl_khr_debug
{
ctxt.gl.DebugMessageCallbackKHR(callback_wrapper, context_raw_ptr.0
as *const libc::c_void);
ctxt.gl.DebugMessageControlKHR(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE, 0,
ptr::null(), gl::TRUE);
if ctxt.state.enabled_debug_output != Some(true) {
ctxt.gl.Enable(gl::DEBUG_OUTPUT);
ctxt.state.enabled_debug_output = Some(true);
}
} else {
ctxt.gl.DebugMessageCallbackARB(callback_wrapper, context_raw_ptr.0
as *const libc::c_void);
ctxt.gl.DebugMessageControlARB(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE,
0, ptr::null(), gl::TRUE);
ctxt.state.enabled_debug_output = Some(true);
}
}
}
}
# settings.py
from __future__ import absolute_import, unicode_literals
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for convenient
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", ("Media Library", "fb_browse"),)),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, "Top navigation bar", "pages/menus/dropdown.html"),
# (2, "Left-hand tree", "pages/menus/tree.html"),
# (3, "Footer", "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# ("Image",),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# ("Another name",),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the south application will be automatically added to the
# INSTALLED_APPS setting.
USE_SOUTH = True
########################
# MAIN DJANGO SETTINGS #
########################
# People who get code error notifications.
# In the format (('Full Name', '[email protected]'),
# ('Full Name', '[email protected]'))
ADMINS = (
('administrator', '[email protected]'),
)
MANAGERS = ADMINS
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['127.0.0.1']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "Europe/Rome"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
_ = lambda s: s
LANGUAGES = (
('en', _('English')),
)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = True
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Tuple of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = ("127.0.0.1",)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
)
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
#############
# DATABASES #
#############
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.postgresql_psycopg2",
# DB name or path to database file if using sqlite3.
"NAME": "mezzanine_mailchimper",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "localhost",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
"ATOMIC_REQUESTS": True,
}
}
#########
# PATHS #
#########
import os
# Full filesystem path to the project.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Name of the directory for the project.
PROJECT_DIRNAME = PROJECT_ROOT.split(os.sep)[-1]
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_DIRNAME
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_DIRNAME
# Put strings here, like "/home/html/django_templates"
# or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
TEMPLATE_DIRS = (os.path.join(PROJECT_ROOT, "templates"),)
###########
# LOGGING #
###########
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'null': {
'class': 'django.utils.log.NullHandler',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler',
# 'email_backend': 'django.core.mail.backends.console.'
# 'EmailBackend',
}
},
'loggers': {
'django': {
'handlers': ['console'],
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django.security': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'py.warnings': {
'handlers': ['console'],
},
}
}
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mailchimper",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"mezzanine.accounts",
# "django_pdb",
"crispy_forms",
# "functional_tests",
)
# List of processors used by RequestContext to populate the context.
# Each one should be a callable that takes the request object as its
# only parameter and returns a dictionary to add to the context.
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.static",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.core.context_processors.tz",
"mezzanine.conf.context_processors.settings",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.TemplateForDeviceMiddleware",
"mezzanine.core.middleware.TemplateForHostMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
# Uncomment the following if using any of the SSL settings:
# "mezzanine.core.middleware.SSLRedirectMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
# "django_pdb.middleware.PdbMiddleware",
)
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
DEBUG_TOOLBAR_CONFIG = {"INTERCEPT_REDIRECTS": False}
###################
# DEPLOY SETTINGS #
###################
# These settings are used by the default fabfile.py provided.
# Check fabfile.py for defaults.
# FABRIC = {
# "SSH_USER": "", # SSH username
# "SSH_PASS": "", # SSH password (consider key-based authentication)
# "SSH_KEY_PATH": "", # Local path to SSH key file, for key-based auth
# "HOSTS": [], # List of hosts to deploy to
# "VIRTUALENV_HOME": "", # Absolute remote path for virtualenvs
# "PROJECT_NAME": "", # Unique identifier for project
# "REQUIREMENTS_PATH": "", # Path to pip requirements, relative to project
# "GUNICORN_PORT": 8000, # Port gunicorn will listen on
# "LOCALE": "en_US.UTF-8", # Should end with ".UTF-8"
# "LIVE_HOSTNAME": "www.example.com", # Host for public site.
# "REPO_URL": "", # Git or Mercurial remote repo URL for the project
# "DB_PASS": "", # Live database password
# "ADMIN_PASS": "", # Live admin user password
# "SECRET_KEY": SECRET_KEY,
# "NEVERCACHE_KEY": NEVERCACHE_KEY,
# }
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
try:
from local_settings import *
except ImportError:
pass
# Make these unique, and don't share it with anybody.
SECRET_KEY = "%(SECRET_KEY)s"
NEVERCACHE_KEY = "%(NEVERCACHE_KEY)s"
CRISPY_TEMPLATE_PACK = 'bootstrap'
# for functional tests
INSTALLED_APPS = list(INSTALLED_APPS) + [
PACKAGE_NAME_GRAPPELLI, PACKAGE_NAME_FILEBROWSER,
'django.contrib.redirects']
from django import get_version
if int(get_version().split('.')[1]) <= 5:
TEST_RUNNER = 'discover_runner.DiscoverRunner'
TEST_DISCOVER_PATTERN = "test_*.py"
else:
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
# test_discoveredhost.py
# -*- encoding: utf-8 -*-
"""Test class for Foreman Discovery
@Requirement: Discoveredhost
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
import subprocess
import time
from fauxfactory import gen_string
from nailgun import entities
from robottelo.decorators import (
run_in_one_thread,
run_only_on,
skip_if_not_set,
stubbed,
tier3
)
from robottelo.api.utils import configure_provisioning
from robottelo.libvirt_discovery import LibvirtGuest
from robottelo.test import UITestCase
from robottelo.ui.base import UIError
from robottelo.ui.factory import (
edit_param,
make_discoveryrule,
)
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.session import Session
from time import sleep
@run_in_one_thread
class DiscoveryTestCase(UITestCase):
"""Implements Foreman discovery tests in UI."""
def _edit_discovery_fact_column_param(self, session, param_value):
"""
Edit the 'discovery_fact_column' parameter from settings menu.
User can populate a new column on 'Discovered Hosts' page by setting
the value of 'discovery_fact_column'
"""
tab_locator = tab_locators['settings.tab_discovered']
param_name = 'discovery_fact_column'
edit_param(
session=session,
tab_locator=tab_locator,
param_name=param_name,
value_type='input',
param_value=param_value,
)
saved_element = self.settings.get_saved_value(
tab_locator, param_name)
self.assertEqual(param_value, saved_element)
def _ping_host(self, host, timeout=60):
"""Helper to ensure given IP/hostname is reachable after reboot.
:param host: A string. The IP or hostname of host.
:param int timeout: The polling timeout in seconds.
"""
timeup = time.time() + int(timeout)
while True:
command = subprocess.Popen(
'ping -c1 {0}; echo $?'.format(host),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
output = command.communicate()[0]
# The trailing "echo $?" reports ping's exit status; 0 means the host replied
if time.time() > timeup:
return False
if int(output.split()[-1]) == 0:
return True
else:
time.sleep(5)
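# Usage sketch (assumed): callers typically block on this helper right
# after triggering a reboot, e.g.
#
#     self.assertTrue(self._ping_host(host_ip, timeout=180))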
@classmethod
@skip_if_not_set('vlan_networking')
def setUpClass(cls):
"""Steps to Configure foreman discovery
1. Build PXE default template
2. Create Organization/Location
3. Update Global parameters to set default org and location for
discovered hosts.
4. Enable auto_provision flag to perform discovery via discovery
rules.
"""
super(DiscoveryTestCase, cls).setUpClass()
# Build PXE default template to get default PXE file
entities.ConfigTemplate().build_pxe_default()
# Create Org and location
cls.org = entities.Organization(name=gen_string('alpha')).create()
cls.org_name = cls.org.name
cls.loc = entities.Location(
name=gen_string('alpha'),
organization=[cls.org],
).create()
# Update default org and location params to place discovered host
cls.discovery_loc = entities.Setting().search(
query={'search': 'name="discovery_location"'})[0]
cls.discovery_loc.value = cls.loc.name
cls.discovery_loc.update({'value'})
cls.discovery_org = entities.Setting().search(
query={'search': 'name="discovery_organization"'})[0]
cls.discovery_org.value = cls.org.name
cls.discovery_org.update({'value'})
# Enable flag to auto provision discovered hosts via discovery rules
cls.discovery_auto = entities.Setting().search(
query={'search': 'name="discovery_auto"'})[0]
cls.default_discovery_auto = str(cls.discovery_auto.value)
cls.discovery_auto.value = 'True'
cls.discovery_auto.update({'value'})
cls.config_env = configure_provisioning(org=cls.org, loc=cls.loc)
@classmethod
def tearDownClass(cls):
"""Restore default 'discovery_auto' global setting's value"""
cls.discovery_auto.value = cls.default_discovery_auto
cls.discovery_auto.update({'value'})
super(DiscoveryTestCase, cls).tearDownClass()
@run_only_on('sat')
@tier3
def test_positive_pxe_based_discovery(self):
"""Discover a host via PXE boot by setting "proxy.type=proxy" in
PXE default
@id: 43a8857d-2f08-436e-97fb-ffec6a0c84dd
@Setup: Provisioning should be configured
@Steps: PXE boot a host/VM
@Assert: Host should be successfully discovered
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
@run_only_on('sat')
@tier3
def test_positive_pxe_less_with_dhcp_unattended(self):
"""Discover a host with dhcp via bootable discovery ISO by setting
"proxy.type=proxy" in PXE default in unattended mode.
@id: fc13167f-6fa0-4fe5-8584-7716292866ce
@Setup: Provisioning should be configured
@Steps: Boot a host/VM using modified discovery ISO.
@Assert: Host should be successfully discovered
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest(boot_iso=True) as pxe_less_host:
hostname = pxe_less_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_with_dhcp_semiauto(self):
"""Discover a host with dhcp via bootable discovery ISO in
semi-automated mode.
@id: 05c88618-6f15-4eb8-8501-3505160c5450
@Setup: Provisioning should be configured
@Steps: Boot a host/VM using discovery ISO
@Assert: Host should be successfully discovered
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_with_dhcp_interactively(self):
"""Discover a host with dhcp via bootable discovery ISO using
interactive TUI mode.
@id: 08780627-9ac1-4837-88eb-df673d974d05
@Setup: Provisioning should be configured
@Steps: Boot a host/VM using discovery ISO
@Assert: Host should be successfully discovered
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_without_dhcp_interactively(self):
"""Discover a host with single NIC on a network without DHCP and PXE
using ISO image in interactive TUI interface.
@id: 9703eb00-9857-4076-8b83-031a58d7c1cd
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_without_dhcp_semiauto(self):
"""Discover a host with single NIC on a network without DHCP and PXE
using ISO image in semi-automated mode.
@id: 8254a85f-21c8-4483-b453-15126762f6e5
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_without_dhcp_unattended(self):
"""Discover a host with single NIC on a network without DHCP and PXE
using ISO image in unattended mode.
@id: ae75173f-8358-4886-9420-06cff3a8510e
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
    def test_positive_discover_pxe_less_with_efi_host_interactively(self):
"""Discover a EFI host with single NIC on a network
using ISO image in interactive TUI mode.
@id: f13fd843-6b39-4c5e-bb7a-b9af9e71eb7b
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_discover_pxe_less_with_efi_host_unattended(self):
"""Discover a EFI host with single NIC on a network
using ISO image in unattended mode.
@id: 515d32ce-44eb-4d27-a353-699bc80fc566
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@tier3
def test_positive_pxe_less_multi_nic_with_dhcp_unattended(self):
"""Discover a host with multiple NIC on a network with dhcp
using ISO image in unattended mode.
@id: cdfebc3d-d8c1-4f82-a384-cc5cd9926c65
@Assert: Host should be discovered successfully
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, "interfaces")
with LibvirtGuest(boot_iso=True, extra_nic=True) as pxe_less_host:
hostname = pxe_less_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
element = locators['discoveredhosts.fetch_interfaces']
host_interfaces = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'eth0,eth1,lo', host_interfaces)
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_multi_nic_with_dhcp_interactively(self):
"""Discover a host with multiple NIC on a network with dhcp
using ISO image in interactive TUI mode.
@id: e29c7f71-096e-42ef-9bbf-77fecac86a9c
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_multi_nic_without_dhcp_interactively(self):
"""Discover a host with multiple NIC on a network without dhcp
using ISO image in interactive TUI mode.
@id: 206a375c-3f42-4cc8-b338-bb85127cffc9
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_pxe_less_multi_nic_without_dhcp_unattended(self):
"""Discover a host with multiple NIC on a network without dhcp
using ISO image in unattended mode.
@id: 1e25326d-2976-4a12-8e02-c4be6705f522
@Assert: Host should be discovered successfully
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@tier3
def test_positive_pxe_multi_nic_unattended(self):
"""Discover a host with multiple NIC on a network with dhcp
using pxe in unattended mode.
@id: 0d004ed0-594f-492f-8756-33349094aa8e
@Assert: Host should be discovered successfully
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, "interfaces")
with LibvirtGuest(extra_nic=True) as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
element = locators['discoveredhosts.fetch_interfaces']
host_interfaces = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'eth0,eth1,lo', host_interfaces)
@run_only_on('sat')
@tier3
def test_custom_facts_discovery(self):
"""Check if defined custom facts are displayed under host's facts
@id: 5492e063-72db-44b8-a34a-9c75c351b89a
@Setup: Provisioning should be configured
@Steps: Validate specified custom facts
@Assert: All defined custom facts should be displayed correctly
@CaseLevel: System
"""
param_value = 'myfact'
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, param_value)
with LibvirtGuest(boot_iso=True) as pxe_less_host:
hostname = pxe_less_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
element = locators['discoveredhosts.fetch_custom_fact']
custom_fact = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'somevalue', custom_fact)
@run_only_on('sat')
@tier3
def test_positive_provision_from_facts(self):
"""Provision the selected discovered host from facts page by
clicking 'provision'
@id: 610bbf32-b342-44ef-8339-0201e0592260
@Setup: Host should already be discovered
@Assert: Host should be provisioned successfully and entry from
discovered host should be auto removed
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.discoveredhosts.provision_discoveredhost(
hostname=host_name,
hostgroup=self.config_env['host_group'],
org=self.org_name,
loc=self.loc.name,
facts_page=True,
quick_create=True)
self.assertIsNotNone(self.discoveredhosts.wait_until_element(
common_locators['notif.success']))
search = self.hosts.search(
u'{0}.{1}'.format(host_name, self.config_env['domain'])
)
self.assertIsNotNone(search)
# Check that provisioned host is not in the list of discovered
# hosts anymore
self.assertIsNone(self.discoveredhosts.search(host_name))
@run_only_on('sat')
@tier3
def test_positive_delete(self):
"""Delete the selected discovered host
@id: 25a2a3ea-9659-4bdb-8631-c4dd19766014
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.discoveredhosts.delete(hostname)
@run_only_on('sat')
@tier3
def test_positive_delete_from_facts(self):
"""Delete the selected discovered host from facts page
@id: 892aa809-bcf0-46ae-8495-70d7a6483b75
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.discoveredhosts.delete_from_facts(hostname)
self.assertIsNone(self.discoveredhosts.search(hostname))
@run_only_on('sat')
@tier3
def test_positive_delete_multiple(self):
"""Delete multiple discovered hosts from 'Select Action'
drop down
@id: 556fb306-512f-46a4-8a0f-af8013161efe
@Setup: Host should already be discovered
@Assert: Selected host should be removed successfully
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_1_host:
host_1_name = pxe_1_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_1_name)
)
with LibvirtGuest() as pxe_2_host:
host_2_name = pxe_2_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_2_name)
)
hostnames = [host_1_name, host_2_name]
for hostname in hostnames:
host = self.discoveredhosts.search(hostname)
if not host:
raise UIError(
'Could not find the selected discovered host '
'"{0}"'.format(hostname)
)
self.discoveredhosts.navigate_to_entity()
# To delete multiple discovered hosts
self.discoveredhosts.multi_delete(hostnames)
for hostname in [host_1_name, host_2_name]:
self.assertIsNone(
self.discoveredhosts.search(hostname)
)
@run_only_on('sat')
@tier3
def test_positive_refresh_facts_pxe(self):
"""Refresh the facts of pxe-based discovered host by adding a new NIC.
@id: cda4103c-6d1a-4f9e-bf57-e516ef1f2a37
@Setup: Host should already be discovered
@Assert: Facts should be refreshed successfully with new NIC
@CaseLevel: System
"""
param_value = 'interfaces'
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, param_value)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
# To add a new network interface on discovered host
pxe_host.attach_nic()
# To refresh the facts of discovered host,
# UI should show newly added interface on refresh_facts
self.discoveredhosts.refresh_facts(hostname)
element = locators['discoveredhosts.fetch_interfaces']
host_interfaces = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'eth0,eth1,lo', host_interfaces)
@run_only_on('sat')
@tier3
def test_positive_refresh_facts_pxe_less(self):
"""Refresh the facts of pxe-less discovered host by adding a new NIC.
@id: 367a5336-a0fa-491b-8153-3e39d68eb978
@Setup: Host should already be discovered
@Assert: Facts should be refreshed successfully with new NIC
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, 'interfaces')
with LibvirtGuest(boot_iso=True) as pxe_less_host:
hostname = pxe_less_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
self.assertIsNotNone(self.discoveredhosts.search(hostname))
# To add a new network interface on discovered host
pxe_less_host.attach_nic()
# To refresh the facts of discovered host,
# UI should show newly added interface on refresh_facts
self.discoveredhosts.refresh_facts(hostname)
element = locators['discoveredhosts.fetch_interfaces']
host_interfaces = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'eth0,eth1,lo', host_interfaces)
@run_only_on('sat')
@tier3
def test_positive_reboot(self):
"""Reboot a discovered host.
@id: 5edc6831-bfc8-4e69-9029-b4c0caa3ee32
@Setup: Host should already be discovered
@Assert: Host should be successfully rebooted.
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
element = (locators['discoveredhosts.fetch_ip'] % hostname)
# Get the IP of discovered host
host_ip = self.discoveredhosts.fetch_fact_value(
hostname, element)
# Check if host is reachable via IP
self.assertTrue(self._ping_host(host_ip))
self.discoveredhosts.reboot_host(hostname)
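                # Poll for up to ~60 seconds (12 attempts, ~5 s apart); the
                # for/else below fails the test only if the host keeps
                # answering ping, i.e. it never actually stopped.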
for _ in range(12):
response = self._ping_host(host_ip, timeout=5)
if not response:
break
sleep(5)
else:
self.fail('Host was not stopped')
@run_only_on('sat')
@tier3
def test_positive_update_default_org(self):
"""Change the default org of more than one discovered hosts
from 'Select Action' drop down
@id: fe6ab6e0-c942-46c1-8ae2-4f4caf00e0d8
@Setup: Host should already be discovered
@Assert: Default org should be successfully changed for multiple hosts
@CaseLevel: System
"""
new_org = gen_string('alpha')
entities.Organization(name=new_org).create()
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_1_host:
host_1_name = pxe_1_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_1_name)
)
with LibvirtGuest() as pxe_2_host:
host_2_name = pxe_2_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_2_name)
)
hostnames = [host_1_name, host_2_name]
for hostname in hostnames:
self.assertIsNotNone(
self.discoveredhosts.search(hostname))
self.discoveredhosts.update_org_loc(hostnames, new_org)
@run_only_on('sat')
@tier3
def test_positive_update_default_location(self):
"""Change the default location of more than one discovered hosts
from 'Select Action' drop down
@id: 537bfb51-144a-44be-a087-d2437f074464
@Setup: Host should already be discovered
@Assert: Default Location should be successfully changed for multiple
hosts
@CaseLevel: System
"""
loc = entities.Location().create()
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_1_host:
host_1_name = pxe_1_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_1_name)
)
with LibvirtGuest() as pxe_2_host:
host_2_name = pxe_2_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_2_name)
)
hostnames = [host_1_name, host_2_name]
for hostname in hostnames:
self.assertIsNotNone(
self.discoveredhosts.search(hostname))
self.discoveredhosts.update_org_loc(
hostnames, new_loc=loc.name)
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_auto_provision_host_with_rule(self):
"""Create a new discovery rule and provision a discovered host using
that discovery rule.
        Set query as (e.g. IP=IP_of_discovered_host)
@id: 00686008-87eb-4b76-9579-ceddb578ef31
@Setup: Host should already be discovered
@Assert: Host should reboot and provision
@CaseLevel: System
@caseautomation: notautomated
"""
@run_only_on('sat')
@tier3
def test_positive_manual_provision_host_with_rule(self):
"""Create a new discovery rule and manually provision a discovered host using
that discovery rule.
        Set query as (e.g. IP=IP_of_discovered_host)
@id: 4488ab9a-d462-4a62-a1a1-e5656c8a8b99
@Setup: Host should already be discovered
@Assert: Host should reboot and provision
@CaseLevel: System
"""
rule_name = gen_string('alpha')
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
element = (locators['discoveredhosts.fetch_ip'] % host_name)
# Get the IP of discovered host
host_ip = self.discoveredhosts.fetch_fact_value(
host_name, element)
# Define a discovery rule with IP_address
make_discoveryrule(<|fim▁hole|> name=rule_name,
host_limit=1,
hostgroup=self.config_env['host_group'],
search_rule=host_ip,
locations=[self.loc.name],
)
self.assertIsNotNone(self.discoveryrules.search(rule_name))
self.discoveredhosts.auto_provision(host_name)
self.assertIsNotNone(self.discoveredhosts.wait_until_element(
common_locators['notif.success']))
self.assertIsNotNone(self.hosts.search(
u'{0}.{1}'.format(host_name, self.config_env['domain'])))
# Check that provisioned host is not in the list of discovered
# hosts anymore
self.assertIsNone(self.discoveredhosts.search(host_name))
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_provision_multi_host_with_rule(self):
"""Create a new discovery rule with (host_limit = 0)
that applies to multi hosts.
Set query as cpu_count = 1 OR mem > 500
@id: d25c088f-ee7a-4a3a-9b51-8f65f545e680
@Setup: Multiple hosts should already be discovered in same subnet.
@Assert: All Hosts of same subnet should reboot and provision
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_provision_with_rule_priority(self):
"""Create multiple discovery rules with different priority and check
rule with highest priority executed first
@id: 8daf0b35-912b-441d-97d3-45f48799f4ba
@Setup: Multiple hosts should already be discovered
        @Assert: The rule with the lower priority value should take precedence
        and be executed first.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@tier3
def test_positive_provision_without_auto_provision(self):
"""Create a discovery rule and execute it when
"auto_provisioning" flag set to 'false'
@id: 25f5112b-7bbd-4bda-8d75-c43bd6390aa8
@Setup: Host should already be discovered
@Assert: Host should not be rebooted automatically
@CaseLevel: System
"""
try:
# Disable flag to auto provision
discovery_auto = entities.Setting().search(
query={'search': 'name="discovery_auto"'})[0]
default_discovery_auto = discovery_auto.value
discovery_auto.value = 'False'
discovery_auto.update(['value'])
rule_name = gen_string('alpha')
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# Define a discovery rule
make_discoveryrule(
session,
name=rule_name,
host_limit=1,
hostgroup=self.config_env['host_group'],
search_rule='cpu_count = 1',
locations=[self.loc.name],
)
self.assertIsNotNone(self.discoveryrules.search(rule_name))
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.assertIsNotNone(
self.discoveredhosts.search(host_name))
# Check that host shouldn't list under all hosts
self.assertIsNone(self.hosts.search(
u'{0}.{1}'.format(host_name, self.config_env['domain'])
))
# Check that host still listed under discovered hosts
self.assertIsNotNone(
self.discoveredhosts.search(host_name))
finally:
# Revert the discovery_auto flag to default value
discovery_auto.value = default_discovery_auto
discovery_auto.update(['value'])
@run_only_on('sat')
@stubbed()
@tier3
def test_negative_create_discovery_rule(self):
"""Create a discovery rule with invalid query
e.g. BIOS = xyz
@id: 89014adf-6346-4681-9107-6d92e14b6a3e
@Setup: Host should already be discovered
@Assert: Rule should automatically be skipped on clicking
        'Auto provision'. UI should raise 'No matching rule found'
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_multi_provision_with_rule_limit(self):
"""Create a discovery rule (CPU_COUNT = 2) with host limit 1 and
provision more than one host with same rule
@id: ab14c56d-331f-466b-aeb0-41fb19f7b3aa
@Setup: Host with two CPUs should already be discovered
        @Assert: Rule should only be applied to one discovered host and should
        be skipped for the other.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_update_discovery_rule(self):
"""Update an existing rule and execute it
@id: 0969cf6f-215d-44c5-96b5-91cb1d865ad0
@Setup: Host should already be discovered
@Assert: User should be able to update the rule and it should be
executed on discovered host
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@tier3
def test_positive_update_name(self):
"""Update the discovered host name and provision it
@id: 3770b007-5006-4815-ae03-fbd330aad304
@Setup: Host should already be discovered
@Assert: The hostname should be updated and host should be provisioned
@CaseLevel: System
"""
name = gen_string('alpha')
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.discoveredhosts.provision_discoveredhost(
hostname=host_name,
hostgroup=self.config_env['host_group'],
org=self.org_name,
loc=self.loc.name,
new_name=name)
new_host_name = (
u'{0}.{1}'.format(name, self.config_env['domain']))
self.assertIsNotNone(self.hosts.search(new_host_name))
# Check that provisioned host is not in the list of discovered
# hosts anymore
self.assertIsNone(self.discoveredhosts.search(host_name))
@run_only_on('sat')
@tier3
def test_positive_auto_provision_all(self):
"""Discover a bunch of hosts and auto-provision all
@id: e26129b5-16fa-418c-b768-21670e9f0b74
@Assert: All host should be successfully rebooted and provisioned
@CaseLevel: System
"""
rule_name = gen_string('alpha')
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_1_host:
host_1_name = pxe_1_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_1_name)
)
with LibvirtGuest() as pxe_2_host:
host_2_name = pxe_2_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_2_name)
)
# Define a discovery rule
make_discoveryrule(
session,
name=rule_name,
host_limit=2,
hostgroup=self.config_env['host_group'],
search_rule='cpu_count = 1',
locations=[self.loc.name],
)
self.assertIsNotNone(self.discoveryrules.search(rule_name))
self.discoveredhosts.auto_provision_all()
hostnames = [host_1_name, host_2_name]
for hostname in hostnames:
self.assertIsNotNone(self.hosts.search(
u'{0}.{1}'.format(
hostname, self.config_env['domain'])))
# Check that provisioned host is not in the list of
# discovered hosts anymore
self.assertIsNone(
self.discoveredhosts.search(hostname))
@run_only_on('sat')
@tier3
def test_positive_add_fact_column(self):
"""Add a new fact column to display on discovered host page
@id: 914bd47f-b2a6-459e-b166-70dbc9ce1bc6
@Steps:
        1. Go to settings -> Discovered tab -> discovery_fact_column
        2. Edit discovery_fact_column
3. Add bios_vendor
@Assert: The added fact should be displayed on 'discovered_host' page
after successful discovery
@CaseLevel: System
"""
param_value = 'bios_vendor'
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, param_value)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
element = locators['discoveredhosts.fetch_bios']
host_bios = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(u'Seabios', host_bios)
@run_only_on('sat')
@tier3
def test_negative_add_fact(self):
"""Add a new fact column with invalid fact to display on
discovered host page
@id: 4e9bc843-4ba2-40d4-a1b3-2d7be117664f
@Steps:
        1. Go to settings -> Discovered tab -> discovery_fact_column
        2. Edit discovery_fact_column
3. Add 'test'
        @Assert: The added fact should be displayed on the 'discovered_host'
        page after successful discovery and show 'N/A'
@CaseLevel: System
"""
param_value = 'test'
expected_value = u'N/A'
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
# To show new fact column 'Interfaces' on Discovered Hosts page
self._edit_discovery_fact_column_param(session, param_value)
with LibvirtGuest() as pxe_host:
hostname = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(hostname)
)
element = (
locators['discoveredhosts.fetch_fact'] % expected_value
)
fact_value = self.discoveredhosts.fetch_fact_value(
hostname, element)
self.assertEqual(expected_value, fact_value)
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_discovery_manager_role(self):
"""Assign 'Discovery_Manager' role to a normal user
@id: c219c877-e785-41a3-9abe-803a9b26bcad
        @Assert: User should be able to view, provision, edit and destroy one
        or more discovered hosts, as well as view, create_new, edit, execute and
delete discovery rules.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_discovery_reader_role(self):
"""Assign 'Discovery Reader" role to a normal user
@id: 075bd559-a3bb-42ca-86a4-60581c650a1d
        @Assert: User should be able to view existing discovered hosts and rules
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_validate_pxe_less_discovery_status_screen(self):
"""Validate all the buttons from "Discovery Status" TUI screen of a
pxe-less discovered host
@id: a18694ad-7642-472f-8e7c-c911c892a763
@Assert: All buttons should work
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_negative_validate_network_config_screen(self):
"""Validate network configuration screen by specifying invalid
IP/gateway/DNS address notation.
@id: b1d24367-9a7e-4d8e-85b6-989d8c520498
@Assert: User should get an error message
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_negative_pxe_less_discovery_without_dhcp(self):
"""Discover a host via pxe-less and select "Discover using DHCP"
interactively when no dhcp is available.
@id: adef940c-8948-4cd9-88b3-f0b307134536
@Assert: User should get an error message "Unable to bring network via
DHCP" and click on 'OK' should open the ''Network configuration screen"
to manually specify the IP/GW/DNS.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_provision_with_org_loc_from_new_model_window(self):
"""Provision a discovered host manually by associating org & loc from
        host properties modal window and selecting the create host button.
@id: 8c6a7d3f-e34e-4888-9b1c-58e71ee584a3
@Assert: Provisioned host is associated with selected org & location
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@tier3
def test_positive_provision_with_hostgroup_from_new_model_window(self):
"""Provision a discovered host manually by associating hostgroup from
        host properties modal window and selecting the create host button.
@id: f17fb8c9-f9cb-4547-80bc-3b40c6691bb1
@Assert: Provisioned host is created with selected host-group and entry
from discovered host should be auto removed.
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.assertIsNotNone(self.discoveredhosts.search(host_name))
self.discoveredhosts.provision_discoveredhost(
hostname=host_name,
hostgroup=self.config_env['host_group'],
org=self.org_name,
loc=self.loc.name)
self.assertIsNotNone(self.hosts.search(
u'{0}.{1}'.format(host_name, self.config_env['domain'])))
# Check that provisioned host is not in the list of discovered
# hosts anymore
self.assertIsNone(self.discoveredhosts.search(host_name))
@run_only_on('sat')
@tier3
def test_positive_provision_using_quick_host_button(self):
"""Associate hostgroup while provisioning a discovered host from
        host properties modal window and selecting quick host.
@id: 34c1e9ea-f210-4a1e-aead-421eb962643b
@Setup:
1. Host should already be discovered
2. Hostgroup should already be created with all required entities.
@Assert: Host should be quickly provisioned and entry from
discovered host should be auto removed.
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_name = pxe_host.guest_name
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.assertIsNotNone(self.discoveredhosts.search(host_name))
self.discoveredhosts.provision_discoveredhost(
hostname=host_name,
hostgroup=self.config_env['host_group'],
org=self.org_name,
loc=self.loc.name,
quick_create=True)
self.assertIsNotNone(self.hosts.search(
u'{0}.{1}'.format(host_name, self.config_env['domain'])))
# Check that provisioned host is not in the list of discovered
# hosts anymore
self.assertIsNone(self.discoveredhosts.search(host_name))
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_provision_with_facts_set_by_user(self):
"""Provision a discovered host with clear_all_facts setting's default
value 'No'
@id: 5dbb9a9f-117d-41aa-8f15-d4da6163b244
@Setup:
1. Host should already be discovered
2. Go to setting -> clear_all_facts -> No
        @Assert: After successful provisioning, all facts set by the user
        should be visible, including the ones starting with the discovery
        keyword.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_provision_with_clear_facts_set_by_user(self):
"""Provision a discovered host by setting clear_all_facts
value to 'Yes'
@id: 9f153b3a-4c21-41a2-b2a0-a0b1bee262d3
@Setup:
1. Host should already be discovered
2. Go to setting -> clear_all_facts -> Yes
        @Assert: After successful provisioning, all facts set by the user
        should be deleted except the ones starting with the discovery keyword.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_lock_discovered_host_into_discovery(self):
"""Lock host into discovery via PXE configuration
@id: 4ba9f923-0b8f-40ee-8bcb-90ff496587c4
@Steps:
1. Go to setting -> discovery_lock -> true
2. Go to setting -> discovery_lock_template -> template to be locked
with
@Assert: Host should boot into discovery mode and should be discovered.
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_populate_puppet_params_using_hostgroup(self):
"""On provisioning a host associate hostgroup and see if PuppetCA
and Puppetmaster are being populated.
@id: 21e55ffa-02bc-4f96-b463-887da30fb1c4
@Steps:
1. Discover a host
2. Create a hostgroup with puppetCA and puppetmaster
@Assert: Parameters like PuppetCA/Puppetmaster should be populated on
associating hostgroup to discovered host
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_update_default_org_from_settings(self):
"""Update the default 'Discovery Organization' settings to place the
discovered hosts in.
@id: 596a98ad-90f6-42ff-b8ef-47f02dc5d595
@Steps:
1. Go to setting -> Discovered -> Discovery organization
2. Update default org from dropdown
@Assert: Discovered host should automatically be placed in selected
default org
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_update_default_location_from_settings(self):
"""Update the default 'Discovery Location' settings to place the
discovered hosts in.
@id: 4bba9899-a53e-4521-b212-aee893f7a726
@Steps:
1. Go to setting -> Discovered -> Discovery Location
2. Update default location from dropdown
@Assert: Discovered host should automatically be placed in selected
default location
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_check_network_facts(self):
"""Check if network facts ending with _eth0 are correctly displayed
under discovered host page
@id: 5a06236c-05dc-4a98-b1b5-9586c95203f9
        @Assert: Network facts like the ones below should be displayed on the
        discovered host page:
1. facts ending with _eth0
2. auto_negotiation_XXX
        3. LLDP facts like lldp_neighbor_portid_XXX
@caseautomation: notautomated
@CaseLevel: System
"""
@run_only_on('sat')
@stubbed()
@tier3
def test_positive_rebuild_dns_on_provisioning(self):
"""Force DNS rebuild when provisioning discovered host
@id: 87aa3279-7c29-40e8-a4d2-0aab43f0972f
        @Setup: Make sure the 'discovery_always_rebuild_dns' setting is set to true
@Assert: DNS record should be recreated on provisioning discovered host
@caseautomation: notautomated
@CaseLevel: System
"""
class DiscoveryPrefixTestCase(UITestCase):
"""Test around updating Discovery Prefix"""
@classmethod
def setUpClass(cls):
"""Update discovery prefix with some string than default 'mac'"""
super(DiscoveryPrefixTestCase, cls).setUpClass()
cls.org = entities.Organization(name=gen_string('alpha')).create()
cls.org_name = cls.org.name
# Update hostname_prefix with some string other than default 'mac'
cls.prefix = 'dhost'
cls.discovery_prefix = entities.Setting().search(
query={'search': 'name="discovery_prefix"'})[0]
cls.default_prefix = str(cls.discovery_prefix.value)
cls.discovery_prefix.value = cls.prefix
cls.discovery_prefix.update(['value'])
cls.discovery_org = entities.Setting().search(
query={'search': 'name="discovery_organization"'})[0]
cls.discovery_org.value = cls.org.name
cls.discovery_org.update(['value'])
@classmethod
def tearDownClass(cls):
"""Restore default 'hostname_prefix' global setting's value"""
cls.discovery_prefix.value = cls.default_prefix
cls.discovery_prefix.update(['value'])
super(DiscoveryPrefixTestCase, cls).tearDownClass()
@run_only_on('sat')
@tier3
def test_positive_update_discovery_prefix(self):
"""Update the discovery_prefix parameter other than mac
@id: 08f1d852-e9a0-430e-b73a-e2a7a144ac10
@Steps:
        1. Go to settings -> Discovered tab -> discovery_prefix
2. Edit discovery_prefix using any text that must start with a letter
@Setup: Host should already be discovered
@Assert: Host should be discovered with updated prefix.
@CaseLevel: System
"""
with Session(self.browser) as session:
session.nav.go_to_select_org(self.org_name)
with LibvirtGuest() as pxe_host:
host_mac = pxe_host.mac
host_name = '{0}{1}'.format(
                    self.prefix, host_mac.replace(':', '')
)
self.assertTrue(
self.discoveredhosts.waitfordiscoveredhost(host_name)
)
self.assertIsNotNone(self.discoveredhosts.search(host_name))<|fim▁end|>
|
session,
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Demo component
// this is only example component
// you can find tests in __test__ folder
import React from 'react';
import Button from './components/Button'
class TeamCatfish extends React.Component {
render() {
return (
<div className="team-catfish">
<p>TC</p><|fim▁hole|>
module.exports = {
TeamCatfish,
...Button
}<|fim▁end|>
|
</div>
)
}
};
|
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>use cargo::core::Workspace;
use cargo::ops::{self, MessageFormat, Packages};
use cargo::util::{CliResult, CliError, Config, CargoErrorKind};
use cargo::util::important_paths::find_root_manifest_for_wd;
#[derive(Deserialize)]
pub struct Options {
arg_args: Vec<String>,
flag_features: Vec<String>,
flag_all_features: bool,
flag_jobs: Option<u32>,
flag_manifest_path: Option<String>,
flag_no_default_features: bool,
flag_no_run: bool,
flag_package: Vec<String>,
flag_target: Option<String>,
flag_lib: bool,
flag_doc: bool,
flag_bin: Vec<String>,
flag_bins: bool,
flag_example: Vec<String>,
flag_examples: bool,
flag_test: Vec<String>,
flag_tests: bool,
flag_bench: Vec<String>,
flag_benches: bool,
flag_verbose: u32,
flag_quiet: Option<bool>,
flag_color: Option<String>,
flag_message_format: MessageFormat,
flag_release: bool,
flag_no_fail_fast: bool,
flag_frozen: bool,
flag_locked: bool,
flag_all: bool,
flag_exclude: Vec<String>,
}
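// Sketch of how docopt fills `Options` from the USAGE string below
// (illustrative values, not from this file): running
// `cargo test --lib -p foo -- --nocapture` yields roughly
// `Options { flag_lib: true, flag_package: vec!["foo".into()],
//            arg_args: vec!["--nocapture".into()], .. }`.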
pub const USAGE: &'static str = "
Execute all unit and integration tests of a local package
Usage:
cargo test [options] [--] [<args>...]
Options:
-h, --help Print this message
--lib Test only this package's library
--doc Test only this library's documentation
--bin NAME ... Test only the specified binary
--bins Test all binaries
--example NAME ... Check that the specified examples compile
--examples Check that all examples compile
--test NAME ... Test only the specified test target
--tests Test all tests
--bench NAME ... Test only the specified bench target
--benches Test all benches
--no-run Compile, but don't run tests
-p SPEC, --package SPEC ... Package to run tests for
--all Test all packages in the workspace
--exclude SPEC ... Exclude packages from the test
-j N, --jobs N Number of parallel builds, see below for details
--release Build artifacts in release mode, with optimizations
--features FEATURES Space-separated list of features to also build
--all-features Build all available features
--no-default-features Do not build the `default` feature
--target TRIPLE Build for the target triple
--manifest-path PATH Path to the manifest to build tests for
-v, --verbose ... Use verbose output (-vv very verbose/build.rs output)
-q, --quiet No output printed to stdout
--color WHEN Coloring: auto, always, never
--message-format FMT Error format: human, json [default: human]
--no-fail-fast Run all tests regardless of failure
--frozen Require Cargo.lock and cache are up to date
--locked Require Cargo.lock is up to date
All of the trailing arguments are passed to the test binaries generated for
filtering tests and generally providing options configuring how they run. For
example, this will run all tests with the name `foo` in their name:
cargo test foo
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be tested. If it is not given, then the
current package is tested. For more information on SPEC and its format, see the
`cargo help pkgid` command.
All packages in the workspace are tested if the `--all` flag is supplied. The
`--all` flag may be supplied in the presence of a virtual manifest.
The --jobs argument affects the building of the test executable but does
not affect how many jobs are used when running the tests. The default value
for the --jobs argument is the number of CPUs. If you want to control the
number of simultaneous running test cases, pass the `--test-threads` option
to the test binaries:
cargo test -- --test-threads=1
<|fim▁hole|>keep results readable. Test output can be recovered (e.g. for debugging)
by passing `--nocapture` to the test binaries:
cargo test -- --nocapture
To get the list of all options available for the test binaries use this:
cargo test -- --help
";
pub fn execute(options: Options, config: &Config) -> CliResult {
config.configure(options.flag_verbose,
options.flag_quiet,
&options.flag_color,
options.flag_frozen,
options.flag_locked)?;
let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?;
let empty = Vec::new();
let (mode, filter);
if options.flag_doc {
mode = ops::CompileMode::Doctest;
filter = ops::CompileFilter::new(true, &empty, false, &empty, false,
&empty, false, &empty, false);
} else {
mode = ops::CompileMode::Test;
filter = ops::CompileFilter::new(options.flag_lib,
&options.flag_bin, options.flag_bins,
&options.flag_test, options.flag_tests,
&options.flag_example, options.flag_examples,
&options.flag_bench, options.flag_benches);
}
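    // With --doc, the compile mode is Doctest and the filter above selects
    // only the library target (the first `true`), since doctests are compiled
    // from the library sources.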
let spec = Packages::from_flags(options.flag_all,
&options.flag_exclude,
&options.flag_package)?;
let ops = ops::TestOptions {
no_run: options.flag_no_run,
no_fail_fast: options.flag_no_fail_fast,
only_doc: options.flag_doc,
compile_opts: ops::CompileOptions {
config: config,
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
all_features: options.flag_all_features,
no_default_features: options.flag_no_default_features,
spec: spec,
release: options.flag_release,
mode: mode,
filter: filter,
message_format: options.flag_message_format,
target_rustdoc_args: None,
target_rustc_args: None,
},
};
let ws = Workspace::new(&root, config)?;
let err = ops::run_tests(&ws, &ops, &options.arg_args)?;
match err {
None => Ok(()),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
Some(i) => CliError::new(err.hint().into(), i),
None => CliError::new(CargoErrorKind::CargoTestErrorKind(err).into(), 101),
})
}
}
}<|fim▁end|>
|
Compilation can be configured via the `test` profile in the manifest.
By default the rust test harness hides output from test execution to
|
<|file_name|>test_true_condition.py<|end_file_name|><|fim▁begin|>import pytest
from clustaar.authorize.conditions import TrueCondition
@pytest.fixture
def condition():
return TrueCondition()
<|fim▁hole|><|fim▁end|>
|
class TestCall(object):
def test_returns_true(self, condition):
assert condition({})
|
<|file_name|>test_dist.py<|end_file_name|><|fim▁begin|>from jabbapylib.distance.dist import lev_dist, ham_dist, similarity
def test_lev_dist():
assert lev_dist('ag-tcc', 'cgctca') == 3
assert lev_dist('GUMBO', 'GAMBOL') == 2
assert lev_dist('Google', 'Yahoo!') == 6<|fim▁hole|>def test_ham_dist():
assert ham_dist('toned', 'roses') == 3
def test_similarity():
assert similarity('toned', 'roses') == 2<|fim▁end|>
| |
<|file_name|>pdf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-*- coding: utf-8 -*-
### 2008-2015 Charlie Barnes.
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; either version 2 of the License, or
### (at your option) any later version.
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
def repeat_to_length(string_to_expand, length):
return (string_to_expand * ((length/len(string_to_expand))+1))[:length]
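# e.g. repeat_to_length('.', 8) -> '........'; used below to draw the leader
# dots between a TOC entry and its page number.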
try:
from fpdf import FPDF
except ImportError:
from pyfpdf import FPDF
class PDF(FPDF):
def __init__(self, orientation,unit,format):
FPDF.__init__(self, orientation=orientation,unit=unit,format=format)
self.toc = []
self.numbering = False
self.num_page_num = 0
self.toc_page_break_count = 1
self.set_left_margin(10)
self.set_right_margin(10)
self.do_header = False
self.type = None
self.toc_length = 0
self.doing_the_list = False
self.vcs = []
self.toc_page_num = 2
self.dataset = None
self.orientation = orientation
self.orientation_changes = [0]
def p_add_page(self):
#if(self.numbering):
self.add_page()
self.num_page_num = self.num_page_num + 1
def num_page_no(self):
return self.num_page_num
def startPageNums(self):
self.numbering = True
def stopPageNums(self):
self.numbering = False
def TOC_Entry(self, txt, level=0):
self.toc.append({'t':txt, 'l':level, 'p':str(self.num_page_no()+self.toc_length)})
def insertTOC(self, location=1, labelSize=20, entrySize=10, tocfont='Helvetica', label='Table of Contents'):
#make toc at end
self.stopPageNums()<|fim▁hole|> self.section = 'Contents'
self.p_add_page()
tocstart = self.page
self.set_font('Helvetica', '', 20)
self.multi_cell(0, 20, 'Contents', 0, 'J', False)
used_pages = []
link_abscissa = {}
for t in self.toc:
#Offset
level = t['l']
if level > 0:
self.cell(level*8)
weight = ''
if level == 0:
weight = 'B'
txxt = t['t']
self.set_font(tocfont, weight, entrySize)
strsize = self.get_string_width(txxt)
self.cell(strsize+2, self.font_size+2, txxt, 0, 0, '', False)
#store the TOC links & position for later use
if self.page_no() not in link_abscissa.keys():
link_abscissa[self.page_no()] = []
link_abscissa[self.page_no()].append([int(t['p']), self.y])
#Filling dots
self.set_font(tocfont, '', entrySize)
PageCellSize = self.get_string_width(t['p'])+2
w = self.w-self.l_margin-self.r_margin-PageCellSize-(level*8)-(strsize+2)
nb = w/self.get_string_width('.')
dots = repeat_to_length('.', int(nb))
self.cell(w, self.font_size+2, dots, 0, 0, 'R')
#Page number of the toc entry
self.cell(PageCellSize, self.font_size+2, str(int(t['p'])), 0, 1, 'R')
if self.toc_page_break_count%2 != 0:
self.section = ''
self.toc_page_break_count = self.toc_page_break_count + 1
self.p_add_page()
#Grab it and move to selected location
n = self.page
ntoc = n - tocstart + 1
last = []
#store toc pages
i = tocstart
while i <= n:
last.append(self.pages[i])
i = i + 1
#move pages
i = tocstart
while i >= (location-1):
self.pages[i+ntoc] = self.pages[i]
i = i - 1
#Put toc pages at insert point
i = 0
while i < ntoc:
self.pages[location + i] = last[i]
#loop through all the TOC links for this page and add them
try:
for linkdata in link_abscissa[tocstart+i]:
self.page = location + i
link = self.add_link()
self.set_link(link, y=0, page=linkdata[0])
self.link(x=self.l_margin, y=linkdata[1], w=self.w-self.r_margin, h=self.font_size+2, link=link)
except KeyError:
pass
i = i + 1
self.page = n
def header(self):
if self.do_header:
self.set_font('Helvetica', '', 8)
self.set_text_color(0, 0, 0)
self.set_line_width(0.1)
            if (self.section != 'Contents' and self.page_no()%2 == 0) or (self.section == 'Contents' and self.toc_page_break_count%2 == 0):
self.cell(0, 5, self.section, 'B', 0, 'L', 0) # even page header
self.cell(0, 5, self.title.replace('\n', ' - '), 'B', 1, 'R', 0) # even page header
elif (self.section <> 'Contents' and self.page_no()%2 == 1) or (self.section == 'Contents' and self.toc_page_break_count%2 == 1):
self.cell(0, 5, self.section, 'B', 1, 'R', 0) #odd page header
if self.type == 'list' and self.doing_the_list == True:
                col_width = 12.7  # ((self.w - self.l_margin - self.r_margin)/2)/7.5
#vc headings
self.set_font('Helvetica', '', 10)
self.set_line_width(0.0)
self.set_y(20)
self.set_x(self.w-(7+col_width+(((col_width*3)+(col_width/4))*len(self.vcs))))
self.cell(col_width, 5, '', '0', 0, 'C', 0)
for vc in sorted(self.vcs):
if vc == None:
vc_head_text = ''
else:
vc_head_text = ''.join(['VC',vc])
self.cell((col_width*3), 5, vc_head_text, '0', 0, 'C', 0)
self.cell(col_width/4, 5, '', '0', 0, 'C', 0)
self.ln()
self.set_x(self.w-(7+col_width+(((col_width*3)+(col_width/4))*len(self.vcs))))
self.set_font('Helvetica', '', 8)
self.cell(col_width, 5, '', '0', 0, 'C', 0)
for vc in sorted(self.vcs):
#colum headings
self.cell(col_width, 5, ' '.join([self.dataset.config.get('List', 'distribution_unit'), 'sqs']), '0', 0, 'C', 0)
self.cell(col_width, 5, 'Records', '0', 0, 'C', 0)
self.cell(col_width, 5, 'Last in', '0', 0, 'C', 0)
self.cell(col_width/4, 5, '', '0', 0, 'C', 0)
self.y0 = self.get_y()
if self.section == 'Contributors' or self.section == 'Contents':
self.set_y(self.y0 + 20)
def footer(self):
self.set_y(-20)
self.set_font('Helvetica','',8)
#only show page numbers in the main body
#if self.num_page_no() >= 4 and self.section != 'Contents' and self.section != 'Index' and self.section != 'Contributors' and self.section != 'References' and self.section != 'Introduction' and self.section != '':
if self.num_page_no() >= 5 and self.section != 'Contents' and self.section != '' and self.section != 'Index' and self.section != 'Contributors' and self.section != 'References' and self.section != 'Introduction':
self.cell(0, 10, str(self.num_page_no()+self.toc_length), '', 0, 'C')
def setcol(self, col):
self.col = col
x = 10 + (col*100)
self.set_left_margin(x)
self.set_x(x)
def accept_page_break(self):
if self.section == 'Contents':
self.toc_page_break_count = self.toc_page_break_count + 1
if self.section == 'Contributors':
self.set_y(self.y0+20)
if self.section == 'Index':
if (self.orientation == 'Portrait' and self.col == 0) or (self.orientation == 'Landscape' and (self.col == 0 or self.col == 1)) :
self.setcol(self.col + 1)
self.set_y(self.y0+20)
return False
else:
self.setcol(0)
self.p_add_page()
self.set_y(self.y0+20)
return False
else:
return True<|fim▁end|>
| |
<|file_name|>DispersionSpectrumLike.py<|end_file_name|><|fim▁begin|>import copy
import pandas as pd
from threeML.plugins.SpectrumLike import SpectrumLike
from threeML.utils.OGIP.response import InstrumentResponse
from threeML.utils.spectrum.binned_spectrum import (
BinnedSpectrumWithDispersion,
ChannelSet,
)
__instrument_name = "General binned spectral data with energy dispersion"
class DispersionSpectrumLike(SpectrumLike):
def __init__(
self,
name,
observation,
background=None,
background_exposure=None,
verbose=True,
tstart=None,
tstop=None,
):
"""
A plugin for generic spectral data with energy dispersion, accepts an observed binned spectrum,
and a background binned spectrum or plugin with the background data.
In the case of a binned background spectrum, the background model is profiled
out and the appropriate profile-likelihood is used to fit the total spectrum. In this
case, caution must be used when there are zero background counts in bins as the
profiled background parameters (one per channel) will then have zero information from which to
constrain the background. It is recommended to bin the spectrum such that there is one background count
per channel.
If either an SpectrumLike or XYLike instance is provided as background, it is assumed that this is the
background data and the likelihood model from this plugin is used to simultaneously fit the background
and source.
:param name: the plugin name
:param observation: the observed spectrum
:param background: the background spectrum or a plugin from which the background will be modeled
        :param background_exposure: (optional) adjust the background exposure
        used when the modeled background data comes from an XYLike plugin
:param verbose: turn on/off verbose logging
"""
assert isinstance(
observation, BinnedSpectrumWithDispersion
), "observed spectrum is not an instance of BinnedSpectrumWithDispersion"
assert (
observation.response is not None<|fim▁hole|>
self._rsp = observation.response # type: InstrumentResponse
super(DispersionSpectrumLike, self).__init__(
name=name,
observation=observation,
background=background,
background_exposure=background_exposure,
verbose=verbose,
tstart=tstart,
tstop=tstop,
)
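    # Minimal usage sketch (assumed variable names, not from this file):
    #     obs = BinnedSpectrumWithDispersion(counts, exposure=1.0, response=rsp)
    #     plugin = DispersionSpectrumLike('det0', observation=obs,
    #                                     background=bkg_spectrum)
    #     plugin.set_model(likelihood_model)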
def set_model(self, likelihoodModel):
"""
Set the model to be used in the joint minimization.
"""
# Store likelihood model
self._like_model = likelihoodModel
# We assume there are no extended sources, since we cannot handle them here
assert self._like_model.get_number_of_extended_sources() == 0, (
"OGIP-like plugins do not support " "extended sources"
)
# Get the differential flux function, and the integral function, with no dispersion,
# we simply integrate the model over the bins
differential_flux, integral = self._get_diff_flux_and_integral(self._like_model)
self._rsp.set_function(integral)
def _evaluate_model(self):
"""
evaluates the full model over all channels
:return:
"""
return self._rsp.convolve()
def get_simulated_dataset(self, new_name=None, **kwargs):
"""
Returns another DispersionSpectrumLike instance where data have been obtained by randomizing the current expectation from the
model, as well as from the background (depending on the respective noise models)
:return: a DispersionSpectrumLike simulated instance
"""
# pass the response thru to the constructor
return super(DispersionSpectrumLike, self).get_simulated_dataset(
new_name=new_name, **kwargs
)
def get_pha_files(self):
info = {}
# we want to pass copies so that
# the user doesn't grab the instance
# and try to modify things. protection
info["pha"] = copy.copy(self._observed_spectrum)
if self._background_spectrum is not None:
info["bak"] = copy.copy(self._background_spectrum)
info["rsp"] = copy.copy(self._rsp)
return info
def display_rsp(self):
"""
Display the currently loaded full response matrix, i.e., RMF and ARF convolved
:return:
"""
self._rsp.plot_matrix()
@property
def response(self):
return self._rsp
def _output(self):
# type: () -> pd.Series
super_out = super(DispersionSpectrumLike, self)._output() # type: pd.Series
the_df = pd.Series({"response": self._rsp.rsp_filename})
return super_out.append(the_df)
def write_pha(self, filename, overwrite=False, force_rsp_write=False):
"""
Writes the observation, background and (optional) rsp to PHAII fits files
:param filename: base file name to write out
:param overwrite: if you would like to force overwriting of the files
:param force_rsp_write: force the writing of an rsp even if not required
"""
# we need to pass up the variables to an OGIPLike
# so that we have the proper variable name
# a local import here because OGIPLike is dependent on this
from threeML.plugins.OGIPLike import OGIPLike
ogiplike = OGIPLike.from_general_dispersion_spectrum(self)
ogiplike.write_pha(
file_name=filename, overwrite=overwrite, force_rsp_write=force_rsp_write
)
@staticmethod
def _build_fake_observation(
fake_data, channel_set, source_errors, source_sys_errors, is_poisson, **kwargs
):
"""
        This is the fake observation builder for DispersionSpectrumLike, which
        builds data for a binned spectrum with energy dispersion.
:param fake_data: series of values... they are ignored later
:param channel_set: a channel set
:param source_errors:
:param source_sys_errors:
:param is_poisson:
:return:
"""
assert (
"response" in kwargs
), "A response was not provided. Cannor build synthetic observation"
response = kwargs.pop("response")
observation = BinnedSpectrumWithDispersion(
fake_data,
exposure=1.0,
response=response,
count_errors=source_errors,
sys_errors=source_sys_errors,
quality=None,
scale_factor=1.0,
is_poisson=is_poisson,
mission="fake_mission",
instrument="fake_instrument",
tstart=0.0,
tstop=1.0,
)
return observation
@classmethod
def from_function(
cls,
name,
source_function,
response,
source_errors=None,
source_sys_errors=None,
background_function=None,
background_errors=None,
background_sys_errors=None,
):
"""
Construct a simulated spectrum from a given source function and (optional) background function. If source and/or background errors are not supplied, the likelihood is assumed to be Poisson.
:param name: simulated data set name
:param source_function: astromodels function
:param response: 3ML Instrument response
:param source_errors: (optional) gaussian source errors
:param source_sys_errors: (optional) systematic source errors
:param background_function: (optional) astromodels background function
:param background_errors: (optional) gaussian background errors
:param background_sys_errors: (optional) background systematic errors
:return: simulated DispersionSpectrumLike plugin
"""
channel_set = ChannelSet.from_instrument_response(response)
energy_min, energy_max = channel_set.bin_stack.T
# pass the variables to the super class
return super(DispersionSpectrumLike, cls).from_function(
name,
source_function,
energy_min,
energy_max,
source_errors,
source_sys_errors,
background_function,
background_errors,
background_sys_errors,
response=response,
)<|fim▁end|>
|
), "the observed spectrum does not have a response"
# assign the response to the plugins
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf8 -*-
"""setup
(C) Franck Barbenoire <[email protected]>
License : GPL v3"""
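# Typical usage from a source checkout (standard distutils/setuptools command):
#   python setup.py install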
<|fim▁hole|>from setuptools import find_packages
setup(name = "django-openzoom",
version = "0.1.1",
description = "Django application for displaying very high resolution images",
author = "Franck Barbenoire",
author_email = "[email protected]",
url = "https://github.com/franckinux/django-openzoom",
packages = find_packages(),
include_package_data = True,
zip_safe = False,
classifiers = ['Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content']
)<|fim▁end|>
|
from distutils.core import setup
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#resource, resources, Resources
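# e.g. with resource="book", resources="books", Resources="Books" (values are
# supplied by the generator): a 'books' blueprint, a Books model, a BooksSchema.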
from flask import Blueprint, render_template, request,flash, redirect, url_for
from app.{resources}.models import {Resources}, {Resources}Schema
{resources} = Blueprint('{resources}', __name__, template_folder='templates')
#http://marshmallow.readthedocs.org/en/latest/quickstart.html#declaring-schemas
schema = {Resources}Schema()
#{Resources}
@{resources}.route('/' )
def {resource}_index():
{resources} = {Resources}.query.all()
results = schema.dump({resources}, many=True).data
return render_template('/{resources}/index.html', results=results)
@{resources}.route('/add' , methods=['POST', 'GET'])
def {resource}_add():
if request.method == 'POST':
#Validate form values by de-serializing the request, http://marshmallow.readthedocs.org/en/latest/quickstart.html#validation
form_errors = schema.validate(request.form.to_dict())
if not form_errors:
{resource}={Resources}({add_fields})
return add({resource}, success_url = '{resources}.{resource}_index', fail_url = '{resources}.{resource}_add')
else:
flash(form_errors)
return render_template('/{resources}/add.html')
@{resources}.route('/update/<int:id>' , methods=['POST', 'GET'])
def {resource}_update (id):
#Get {resource} by primary key:
{resource}={Resources}.query.get_or_404(id)
if request.method == 'POST':
form_errors = schema.validate(request.form.to_dict())
if not form_errors:
{update_fields}
return update({resource} , id, success_url = '{resources}.{resource}_index', fail_url = '{resources}.{resource}_update')
else:
flash(form_errors)
return render_template('/{resources}/update.html', {resource}={resource})
@{resources}.route('/delete/<int:id>' , methods=['POST', 'GET'])
def {resource}_delete (id):
{resource} = {Resources}.query.get_or_404(id)
return delete({resource}, fail_url = '{resources}.{resource}_index')
<|fim▁hole|>#CRUD FUNCTIONS
#Arguments are data to add, function to redirect to if the add was successful and if not
def add (data, success_url = '', fail_url = ''):
add = data.add(data)
#if does not return any error
if not add :
flash("Add was successful")
return redirect(url_for(success_url))
else:
message=add
flash(message)
return redirect(url_for(fail_url))
def update (data, id, success_url = '', fail_url = ''):
update=data.update()
#if does not return any error
if not update :
flash("Update was successful")
return redirect(url_for(success_url))
else:
message=update
flash(message)
return redirect(url_for(fail_url, id=id))
def delete (data, fail_url=''):
delete=data.delete(data)
if not delete :
flash("Delete was successful")
else:
message=delete
flash(message)
return redirect(url_for(fail_url))<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![no_std]
extern crate labrador_ldpc;
use labrador_ldpc::LDPCCode;<|fim▁hole|>
#[panic_handler]
#[no_mangle]
pub fn panic(_info: &core::panic::PanicInfo) -> ! {
loop {}
}
#[no_mangle]
pub extern fn labrador_ldpc_code_n(code: LDPCCode) -> usize {
code.n()
}
#[no_mangle]
pub extern fn labrador_ldpc_code_k(code: LDPCCode) -> usize {
code.k()
}
#[no_mangle]
pub extern fn labrador_ldpc_encode(code: LDPCCode, codeword: *mut u8) {
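// Dispatch on pointer alignment: a 4-byte-aligned buffer can be viewed as
// &[u32] (n/32 words) so the encoder works a word at a time; otherwise we
// fall back to the plain byte view (n/8 bytes).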
if (codeword as usize) % 4 == 0 {
let codeword: &mut[u32] = unsafe { slice::from_raw_parts_mut(codeword as *mut u32, code.n()/32) };
code.encode(codeword);
} else {
let codeword: &mut[u8] = unsafe { slice::from_raw_parts_mut(codeword, code.n()/8) };
code.encode(codeword);
}
}
#[no_mangle]
pub extern fn labrador_ldpc_copy_encode(code: LDPCCode, data: *const u8, codeword: *mut u8) {
let data: &[u8] = unsafe { slice::from_raw_parts(data, code.k()/8) };
if (codeword as usize) % 4 == 0 {
let codeword: &mut[u32] = unsafe { slice::from_raw_parts_mut(codeword as *mut u32, code.n()/32) };
code.copy_encode(data, codeword);
} else {
let codeword: &mut[u8] = unsafe { slice::from_raw_parts_mut(codeword, code.n()/8) };
code.copy_encode(data, codeword);
}
}
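// Hypothetical C-side usage of the exports above (buffer sizing follows the
// length helpers below; the exact LDPCCode constant is the caller's choice):
//   size_t n = labrador_ldpc_code_n(code);   /* codeword length in bits */
//   size_t k = labrador_ldpc_code_k(code);   /* data length in bits     */
//   labrador_ldpc_copy_encode(code, data /* k/8 bytes */, cw /* n/8 bytes */);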
#[no_mangle]
pub extern fn labrador_ldpc_bf_working_len(code: LDPCCode) -> usize {
code.decode_bf_working_len()
}
#[no_mangle]
pub extern fn labrador_ldpc_ms_working_len(code: LDPCCode) -> usize {
code.decode_ms_working_len()
}
#[no_mangle]
pub extern fn labrador_ldpc_ms_working_u8_len(code: LDPCCode) -> usize {
code.decode_ms_working_u8_len()
}
#[no_mangle]
pub extern fn labrador_ldpc_output_len(code: LDPCCode) -> usize {
code.output_len()
}
#[no_mangle]
pub extern fn labrador_ldpc_decode_bf(code: LDPCCode, input: *const u8, output: *mut u8,
working: *mut u8, max_iters: usize,
iters_run: *mut usize) -> bool
{
let input: &[u8] = unsafe { slice::from_raw_parts(input, code.n()/8) };
let output: &mut[u8] = unsafe { slice::from_raw_parts_mut(output, code.output_len()) };
let working: &mut[u8] = unsafe { slice::from_raw_parts_mut(working, code.decode_bf_working_len()) };
let (result, iters) = code.decode_bf(input, output, working, max_iters);
if !iters_run.is_null() {
unsafe { *iters_run = iters };
}
result
}
fn decode_ms<T: DecodeFrom>(code: LDPCCode, llrs: *const T, output: *mut u8, working: *mut T,
working_u8: *mut u8, max_iters: usize, iters_run: *mut usize) -> bool
{
let llrs: &[T] = unsafe { slice::from_raw_parts(llrs, code.n()) };
let output: &mut[u8] = unsafe { slice::from_raw_parts_mut(output, code.output_len()) };
let working: &mut[T] = unsafe { slice::from_raw_parts_mut(working, code.decode_ms_working_len()) };
let working_u8: &mut[u8] = unsafe { slice::from_raw_parts_mut(working_u8, code.decode_ms_working_u8_len()) };
let (result, iters) = code.decode_ms(llrs, output, working, working_u8, max_iters);
if !iters_run.is_null() {
unsafe { *iters_run = iters };
}
result
}
#[no_mangle]
pub extern fn labrador_ldpc_decode_ms_i8(code: LDPCCode, llrs: *const i8, output: *mut u8,
working: *mut i8, working_u8: *mut u8, max_iters: usize,
iters_run: *mut usize) -> bool
{
decode_ms::<i8>(code, llrs, output, working, working_u8, max_iters, iters_run)
}
#[no_mangle]
pub extern fn labrador_ldpc_decode_ms_i16(code: LDPCCode, llrs: *const i16, output: *mut u8,
working: *mut i16, working_u8: *mut u8, max_iters: usize,
iters_run: *mut usize) -> bool
{
decode_ms::<i16>(code, llrs, output, working, working_u8, max_iters, iters_run)
}
#[no_mangle]
pub extern fn labrador_ldpc_decode_ms_f32(code: LDPCCode, llrs: *const f32, output: *mut u8,
working: *mut f32, working_u8: *mut u8, max_iters: usize,
iters_run: *mut usize) -> bool
{
decode_ms::<f32>(code, llrs, output, working, working_u8, max_iters, iters_run)
}
#[no_mangle]
pub extern fn labrador_ldpc_decode_ms_f64(code: LDPCCode, llrs: *const f64, output: *mut u8,
working: *mut f64, working_u8: *mut u8, max_iters: usize,
iters_run: *mut usize) -> bool
{
decode_ms::<f64>(code, llrs, output, working, working_u8, max_iters, iters_run)
}
fn hard_to_llrs<T: DecodeFrom>(code: LDPCCode, input: *const u8, llrs: *mut T) {
let input: &[u8] = unsafe { slice::from_raw_parts(input, code.n()/8) };
let llrs: &mut[T] = unsafe { slice::from_raw_parts_mut(llrs, code.n()) };
code.hard_to_llrs(input, llrs);
}
#[no_mangle]
pub extern fn labrador_ldpc_hard_to_llrs_i8(code: LDPCCode, input: *const u8, llrs: *mut i8) {
hard_to_llrs::<i8>(code, input, llrs);
}
#[no_mangle]
pub extern fn labrador_ldpc_hard_to_llrs_i16(code: LDPCCode, input: *const u8, llrs: *mut i16) {
hard_to_llrs::<i16>(code, input, llrs);
}
#[no_mangle]
pub extern fn labrador_ldpc_hard_to_llrs_f32(code: LDPCCode, input: *const u8, llrs: *mut f32) {
hard_to_llrs::<f32>(code, input, llrs);
}
#[no_mangle]
pub extern fn labrador_ldpc_hard_to_llrs_f64(code: LDPCCode, input: *const u8, llrs: *mut f64) {
hard_to_llrs::<f64>(code, input, llrs);
}
fn llrs_to_hard<T: DecodeFrom>(code: LDPCCode, llrs: *const T, output: *mut u8) {
let llrs: &[T] = unsafe { slice::from_raw_parts(llrs, code.n()) };
let output: &mut[u8] = unsafe { slice::from_raw_parts_mut(output, code.n() / 8) };
code.llrs_to_hard(llrs, output);
}
#[no_mangle]
pub extern fn labrador_ldpc_llrs_to_hard_i8(code: LDPCCode, llrs: *const i8, output: *mut u8) {
llrs_to_hard::<i8>(code, llrs, output);
}
#[no_mangle]
pub extern fn labrador_ldpc_llrs_to_hard_i16(code: LDPCCode, llrs: *const i16, output: *mut u8) {
llrs_to_hard::<i16>(code, llrs, output);
}
#[no_mangle]
pub extern fn labrador_ldpc_llrs_to_hard_f32(code: LDPCCode, llrs: *const f32, output: *mut u8) {
llrs_to_hard::<f32>(code, llrs, output);
}
#[no_mangle]
pub extern fn labrador_ldpc_llrs_to_hard_f64(code: LDPCCode, llrs: *const f64, output: *mut u8) {
llrs_to_hard::<f64>(code, llrs, output);
}<|fim▁end|>
|
use labrador_ldpc::decoder::DecodeFrom;
use core::slice;
|
<|file_name|>DatabaseObject.ts<|end_file_name|><|fim▁begin|>export class DatabaseObject {<|fim▁hole|><|fim▁end|>
|
documentType?: string;
}
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Cloud'
db.create_table(u'cloudslave_cloud', (
('name', self.gf('django.db.models.fields.CharField')(max_length=200, primary_key=True)),
('endpoint', self.gf('django.db.models.fields.URLField')(max_length=200)),
('user_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('tenant_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('password', self.gf('django.db.models.fields.CharField')(max_length=200)),
('region', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('flavor_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('image_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('floating_ip_mode', self.gf('django.db.models.fields.SmallIntegerField')(default=0)),
))
db.send_create_signal(u'cloudslave', ['Cloud'])
# Adding model 'KeyPair'
db.create_table(u'cloudslave_keypair', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cloud', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Cloud'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('private_key', self.gf('django.db.models.fields.TextField')()),
('public_key', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'cloudslave', ['KeyPair'])
# Adding unique constraint on 'KeyPair', fields ['cloud', 'name']
db.create_unique(u'cloudslave_keypair', ['cloud_id', 'name'])
# Adding model 'Reservation'
db.create_table(u'cloudslave_reservation', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cloud', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Cloud'])),
('number_of_slaves', self.gf('django.db.models.fields.IntegerField')()),
('state', self.gf('django.db.models.fields.SmallIntegerField')(default=0)),
('timeout', self.gf('django.db.models.fields.DateTimeField')()),
))
db.send_create_signal(u'cloudslave', ['Reservation'])
# Adding model 'Slave'
db.create_table(u'cloudslave_slave', (
('name', self.gf('django.db.models.fields.CharField')(max_length=200, primary_key=True)),
('reservation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cloudslave.Reservation'])),
('cloud_node_id', self.gf('django.db.models.fields.CharField')(max_length=200)),
('state', self.gf('django.db.models.fields.CharField')(max_length=15, null=True, blank=True)),
))
db.send_create_signal(u'cloudslave', ['Slave'])
def backwards(self, orm):
# Removing unique constraint on 'KeyPair', fields ['cloud', 'name']
db.delete_unique(u'cloudslave_keypair', ['cloud_id', 'name'])
# Deleting model 'Cloud'
db.delete_table(u'cloudslave_cloud')
# Deleting model 'KeyPair'
db.delete_table(u'cloudslave_keypair')
# Deleting model 'Reservation'
db.delete_table(u'cloudslave_reservation')
# Deleting model 'Slave'
db.delete_table(u'cloudslave_slave')
models = {
u'cloudslave.cloud': {
'Meta': {'object_name': 'Cloud'},
'endpoint': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'flavor_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'floating_ip_mode': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),<|fim▁hole|> 'tenant_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'cloudslave.keypair': {
'Meta': {'unique_together': "(('cloud', 'name'),)", 'object_name': 'KeyPair'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Cloud']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'private_key': ('django.db.models.fields.TextField', [], {}),
'public_key': ('django.db.models.fields.TextField', [], {})
},
u'cloudslave.reservation': {
'Meta': {'object_name': 'Reservation'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Cloud']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_slaves': ('django.db.models.fields.IntegerField', [], {}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'timeout': ('django.db.models.fields.DateTimeField', [], {})
},
u'cloudslave.slave': {
'Meta': {'object_name': 'Slave'},
'cloud_node_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'primary_key': 'True'}),
'reservation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cloudslave.Reservation']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['cloudslave']<|fim▁end|>
|
'image_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
|
<|file_name|>qmp_basic.py<|end_file_name|><|fim▁begin|>from autotest.client.shared import error
from virttest import qemu_monitor
def run(test, params, env):
"""
QMP Specification test-suite: this checks if the *basic* protocol conforms
to its specification, which is file QMP/qmp-spec.txt in QEMU's source tree.
IMPORTANT NOTES:
o Most tests depend heavily on QMP's error information (eg. classes),
this might have bad implications as the error interface is going to
change in QMP
o Command testing is *not* covered in this suite. Each command has its
own specification and should be tested separately
o We use the same terminology as used by the QMP specification,
especially with regard to JSON types (eg. a Python dict is called
a json-object)
o This is divided into sub test-suites; please check the bottom of this
file to check the order in which they are run
TODO:
o Finding which test failed is not as easy as it should be
o Are all those check_*() functions really needed? Wouldn't a
specialized class (eg. a Response class) do better?
"""
def fail_no_key(qmp_dict, key):
if not isinstance(qmp_dict, dict):
raise error.TestFail("qmp_dict is not a dict (it's '%s')" %
type(qmp_dict))
if key not in qmp_dict:
raise error.TestFail("'%s' key doesn't exist in dict ('%s')" %
(key, str(qmp_dict)))
def check_dict_key(qmp_dict, key, keytype):
"""
Performs the following checks on a QMP dict key:
1. qmp_dict is a dict
2. key exists in qmp_dict
3. key is of type keytype
If any of these checks fails, error.TestFail is raised.
"""
fail_no_key(qmp_dict, key)
if not isinstance(qmp_dict[key], keytype):
raise error.TestFail("'%s' key is not of type '%s', it's '%s'" %
(key, keytype, type(qmp_dict[key])))
def check_key_is_dict(qmp_dict, key):
check_dict_key(qmp_dict, key, dict)
def check_key_is_list(qmp_dict, key):
check_dict_key(qmp_dict, key, list)
def check_key_is_str(qmp_dict, key):
check_dict_key(qmp_dict, key, unicode)
def check_str_key(qmp_dict, keyname, value=None):
check_dict_key(qmp_dict, keyname, unicode)
if value and value != qmp_dict[keyname]:
raise error.TestFail("'%s' key value '%s' should be '%s'" %
(keyname, str(qmp_dict[keyname]), str(value)))
def check_key_is_int(qmp_dict, key):
fail_no_key(qmp_dict, key)
try:
int(qmp_dict[key])
except Exception:
raise error.TestFail("'%s' key is not of type int, it's '%s'" %
(key, type(qmp_dict[key])))
def check_bool_key(qmp_dict, keyname, value=None):
check_dict_key(qmp_dict, keyname, bool)
if value and value != qmp_dict[keyname]:
raise error.TestFail("'%s' key value '%s' should be '%s'" %
(keyname, str(qmp_dict[keyname]), str(value)))
def check_success_resp(resp, empty=False):
"""
Check QMP OK response.
:param resp: QMP response
:param empty: if True, response should not contain data to return
"""
check_key_is_dict(resp, "return")
if empty and len(resp["return"]) > 0:
raise error.TestFail("success response is not empty ('%s')" %
str(resp))
def check_error_resp(resp, classname=None, datadict=None):
"""
Check QMP error response.
:param resp: QMP response
:param classname: Expected error class name
:param datadict: Expected error data dictionary
"""
check_key_is_dict(resp, "error")
check_key_is_str(resp["error"], "class")
if classname and resp["error"]["class"] != classname:
raise error.TestFail("got error class '%s' expected '%s'" %
(resp["error"]["class"], classname))
def test_version(version):
"""
Check the QMP greeting message version key which, according to QMP's
documentation, should be:
{ "qemu": { "major": json-int, "minor": json-int, "micro": json-int }
"package": json-string }
"""
check_key_is_dict(version, "qemu")
for key in ("major", "minor", "micro"):
check_key_is_int(version["qemu"], key)
check_key_is_str(version, "package")
def test_greeting(greeting):
check_key_is_dict(greeting, "QMP")
check_key_is_dict(greeting["QMP"], "version")
check_key_is_list(greeting["QMP"], "capabilities")
def greeting_suite(monitor):
"""
Check the greeting message format, as described in the QMP
specification section '2.2 Server Greeting'.
{ "QMP": { "version": json-object, "capabilities": json-array } }
"""
greeting = monitor.get_greeting()
test_greeting(greeting)
test_version(greeting["QMP"]["version"])
def json_parsing_errors_suite(monitor):
"""
Check that QMP's parser is able to recover from parsing errors, please
check the JSON spec for more info on the JSON syntax (RFC 4627).
"""
# We're quite simple right now and the focus is on parsing errors that
# have already bitten us in the past.
#
# TODO: The following test-cases are missing:
#
# - JSON numbers, strings and arrays
# - More invalid characters or malformed structures
# - Valid, but not obvious syntax, like zillion of spaces or
# strings with unicode chars (different suite maybe?)
bad_json = []
# A JSON value MUST be an object, array, number, string, true, false,
# or null
#
# NOTE: QMP seems to ignore a number of chars, like: | and ?
bad_json.append(":")
bad_json.append(",")
# Malformed json-objects
#
# NOTE: sending only "}" seems to break QMP
# NOTE: Duplicate keys are accepted (should it?)
bad_json.append("{ \"execute\" }")
bad_json.append("{ \"execute\": \"query-version\", }")
bad_json.append("{ 1: \"query-version\" }")
bad_json.append("{ true: \"query-version\" }")
bad_json.append("{ []: \"query-version\" }")
bad_json.append("{ {}: \"query-version\" }")
for cmd in bad_json:
resp = monitor.cmd_raw(cmd)
check_error_resp(resp, "GenericError")
def test_id_key(monitor):
"""
Check that QMP's "id" key is correctly handled.
"""
# The "id" key must be echoed back in error responses
id_key = "virt-test"
resp = monitor.cmd_qmp("eject", {"foobar": True}, q_id=id_key)
check_error_resp(resp)
check_str_key(resp, "id", id_key)
# The "id" key must be echoed back in success responses
resp = monitor.cmd_qmp("query-status", q_id=id_key)<|fim▁hole|> # The "id" key can be any json-object
for id_key in (True, 1234, "string again!", [1, [], {}, True, "foo"],
{"key": {}}):
resp = monitor.cmd_qmp("query-status", q_id=id_key)
check_success_resp(resp)
if resp["id"] != id_key:
raise error.TestFail("expected id '%s' but got '%s'" %
(str(id_key), str(resp["id"])))
def test_invalid_arg_key(monitor):
"""
Currently, the only supported keys in the input object are: "execute",
"arguments" and "id". Although expansion is supported, invalid key
names must be detected.
"""
resp = monitor.cmd_obj({"execute": "eject", "foobar": True})
check_error_resp(resp, "GenericError", {"member": "foobar"})
def test_bad_arguments_key_type(monitor):
"""
The "arguments" key must be an json-object.
We use the eject command to perform the tests, but that's a random
choice, any command that accepts arguments will do, as the command
doesn't get called.
"""
for item in (True, [], 1, "foo"):
resp = monitor.cmd_obj({"execute": "eject", "arguments": item})
check_error_resp(resp, "GenericError",
{"member": "arguments", "expected": "object"})
def test_bad_execute_key_type(monitor):
"""
The "execute" key must be a json-string.
"""
for item in (False, 1, {}, []):
resp = monitor.cmd_obj({"execute": item})
check_error_resp(resp, "GenericError",
{"member": "execute", "expected": "string"})
def test_no_execute_key(monitor):
"""
The "execute" key must exist, we also test for some stupid parsing
errors.
"""
for cmd in ({}, {"execut": "qmp_capabilities"},
{"executee": "qmp_capabilities"}, {"foo": "bar"}):
resp = monitor.cmd_obj(cmd)
check_error_resp(resp) # XXX: check class and data dict?
def test_bad_input_obj_type(monitor):
"""
The input object must be... an json-object.
"""
for cmd in ("foo", [], True, 1):
resp = monitor.cmd_obj(cmd)
check_error_resp(resp, "GenericError", {"expected": "object"})
def test_good_input_obj(monitor):
"""
Basic success tests for issuing QMP commands.
"""
# NOTE: We don't use the cmd_qmp() method here because the command
# object is in a 'random' order
resp = monitor.cmd_obj({"execute": "query-version"})
check_success_resp(resp)
resp = monitor.cmd_obj({"arguments": {}, "execute": "query-version"})
check_success_resp(resp)
idd = "1234foo"
resp = monitor.cmd_obj({"id": idd, "execute": "query-version",
"arguments": {}})
check_success_resp(resp)
check_str_key(resp, "id", idd)
# TODO: would be good to test simple argument usage, but we don't have
# a read-only command that accepts arguments.
def input_object_suite(monitor):
"""
Check the input object format, as described in the QMP specification
section '2.3 Issuing Commands'.
{ "execute": json-string, "arguments": json-object, "id": json-value }
"""
test_good_input_obj(monitor)
test_bad_input_obj_type(monitor)
test_no_execute_key(monitor)
test_bad_execute_key_type(monitor)
test_bad_arguments_key_type(monitor)
test_id_key(monitor)
test_invalid_arg_key(monitor)
def argument_checker_suite(monitor):
"""
Check that QMP's argument checker is detecting all possible errors.
We use a number of different commands to perform the checks, but the
command used doesn't matter much as QMP performs argument checking
_before_ calling the command.
"""
# stop doesn't take arguments
resp = monitor.cmd_qmp("stop", {"foo": 1})
check_error_resp(resp, "GenericError", {"name": "foo"})
# required argument omitted
resp = monitor.cmd_qmp("screendump")
check_error_resp(resp, "GenericError", {"name": "filename"})
# 'bar' is not a valid argument
resp = monitor.cmd_qmp("screendump", {"filename": "outfile",
"bar": "bar"})
check_error_resp(resp, "GenericError", {"name": "bar"})
# test optional argument: 'force' is omitted, but it's optional, so
# the handler has to be called. Test this happens by checking an
# error that is generated by the handler itself.
resp = monitor.cmd_qmp("eject", {"device": "foobar"})
check_error_resp(resp, "DeviceNotFound")
# filename argument must be a json-string
for arg in ({}, [], 1, True):
resp = monitor.cmd_qmp("screendump", {"filename": arg})
check_error_resp(resp, "GenericError",
{"name": "filename", "expected": "string"})
# force argument must be a json-bool
for arg in ({}, [], 1, "foo"):
resp = monitor.cmd_qmp("eject", {"force": arg, "device": "foo"})
check_error_resp(resp, "GenericError",
{"name": "force", "expected": "bool"})
# val argument must be a json-int
for arg in ({}, [], True, "foo"):
resp = monitor.cmd_qmp("memsave", {"val": arg, "filename": "foo",
"size": 10})
check_error_resp(resp, "GenericError",
{"name": "val", "expected": "int"})
# value argument must be a json-number
for arg in ({}, [], True, "foo"):
resp = monitor.cmd_qmp("migrate_set_speed", {"value": arg})
check_error_resp(resp, "GenericError",
{"name": "value", "expected": "number"})
# qdev-type commands have their own argument checker, all QMP does
# is to skip its checking and pass arguments through. Check this
# works by providing invalid options to device_add and expecting
# an error message from qdev
resp = monitor.cmd_qmp("device_add", {"driver": "e1000", "foo": "bar"})
check_error_resp(resp, "GenericError",
{"device": "e1000", "property": "foo"})
def unknown_commands_suite(monitor):
"""
Check that QMP handles unknown commands correctly.
"""
# We also call a HMP-only command, to be sure it will fail as expected
for cmd in ("bar", "query-", "query-foo", "q", "help"):
resp = monitor.cmd_qmp(cmd)
check_error_resp(resp, "CommandNotFound", {"name": cmd})
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
# Look for the first qmp monitor available, otherwise, fail the test
qmp_monitor = vm.get_monitors_by_type("qmp")
if qmp_monitor:
qmp_monitor = qmp_monitor[0]
else:
raise error.TestError('Could not find a QMP monitor, aborting test')
# Run all suites
greeting_suite(qmp_monitor)
input_object_suite(qmp_monitor)
argument_checker_suite(qmp_monitor)
unknown_commands_suite(qmp_monitor)
json_parsing_errors_suite(qmp_monitor)
# check if QMP is still alive
if not qmp_monitor.is_responsive():
raise error.TestFail('QMP monitor is not responsive after testing')<|fim▁end|>
|
check_success_resp(resp)
check_str_key(resp, "id", id_key)
|
<|file_name|>AccountResourceIntTest.java<|end_file_name|><|fim▁begin|>package com.baeldung.jhipster5.web.rest;
import com.baeldung.jhipster5.BookstoreApp;
import com.baeldung.jhipster5.config.Constants;
import com.baeldung.jhipster5.domain.Authority;
import com.baeldung.jhipster5.domain.User;
import com.baeldung.jhipster5.repository.AuthorityRepository;
import com.baeldung.jhipster5.repository.UserRepository;
import com.baeldung.jhipster5.security.AuthoritiesConstants;
import com.baeldung.jhipster5.service.MailService;
import com.baeldung.jhipster5.service.UserService;
import com.baeldung.jhipster5.service.dto.PasswordChangeDTO;
import com.baeldung.jhipster5.service.dto.UserDTO;
import com.baeldung.jhipster5.web.rest.errors.ExceptionTranslator;
import com.baeldung.jhipster5.web.rest.vm.KeyAndPasswordVM;
import com.baeldung.jhipster5.web.rest.vm.ManagedUserVM;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the AccountResource REST controller.
*
* @see AccountResource
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = BookstoreApp.class)
public class AccountResourceIntTest {
@Autowired
private UserRepository userRepository;
@Autowired
private AuthorityRepository authorityRepository;
@Autowired
private UserService userService;
@Autowired
private PasswordEncoder passwordEncoder;
@Autowired
private HttpMessageConverter<?>[] httpMessageConverters;
@Autowired
private ExceptionTranslator exceptionTranslator;
@Mock
private UserService mockUserService;
@Mock
private MailService mockMailService;
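// Two MockMvc instances are built in setup(): restMvc wires the real
// UserService, while restUserMockMvc uses mockUserService so tests can stub
// user lookups.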
private MockMvc restMvc;
private MockMvc restUserMockMvc;
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
doNothing().when(mockMailService).sendActivationEmail(any());
AccountResource accountResource =
new AccountResource(userRepository, userService, mockMailService);
AccountResource accountUserMockResource =
new AccountResource(userRepository, mockUserService, mockMailService);
this.restMvc = MockMvcBuilders.standaloneSetup(accountResource)
.setMessageConverters(httpMessageConverters)
.setControllerAdvice(exceptionTranslator)
.build();
this.restUserMockMvc = MockMvcBuilders.standaloneSetup(accountUserMockResource)
.setControllerAdvice(exceptionTranslator)
.build();
}
@Test
public void testNonAuthenticatedUser() throws Exception {
restUserMockMvc.perform(get("/api/authenticate")
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().string(""));
}
@Test
public void testAuthenticatedUser() throws Exception {
restUserMockMvc.perform(get("/api/authenticate")
.with(request -> {
request.setRemoteUser("test");
return request;
})
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().string("test"));
}
@Test
public void testGetExistingAccount() throws Exception {
Set<Authority> authorities = new HashSet<>();
Authority authority = new Authority();
authority.setName(AuthoritiesConstants.ADMIN);
authorities.add(authority);
User user = new User();
user.setLogin("test");
user.setFirstName("john");
user.setLastName("doe");
user.setEmail("[email protected]");
user.setImageUrl("http://placehold.it/50x50");
user.setLangKey("en");
user.setAuthorities(authorities);
when(mockUserService.getUserWithAuthorities()).thenReturn(Optional.of(user));
restUserMockMvc.perform(get("/api/account")
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(jsonPath("$.login").value("test"))
.andExpect(jsonPath("$.firstName").value("john"))
.andExpect(jsonPath("$.lastName").value("doe"))
.andExpect(jsonPath("$.email").value("[email protected]"))
.andExpect(jsonPath("$.imageUrl").value("http://placehold.it/50x50"))
.andExpect(jsonPath("$.langKey").value("en"))
.andExpect(jsonPath("$.authorities").value(AuthoritiesConstants.ADMIN));
}
@Test
public void testGetUnknownAccount() throws Exception {
when(mockUserService.getUserWithAuthorities()).thenReturn(Optional.empty());
restUserMockMvc.perform(get("/api/account")
.accept(MediaType.APPLICATION_PROBLEM_JSON))
.andExpect(status().isInternalServerError());
}
@Test
@Transactional
public void testRegisterValid() throws Exception {
ManagedUserVM validUser = new ManagedUserVM();
validUser.setLogin("test-register-valid");
validUser.setPassword("password");
validUser.setFirstName("Alice");
validUser.setLastName("Test");
validUser.setEmail("[email protected]");
validUser.setImageUrl("http://placehold.it/50x50");
validUser.setLangKey(Constants.DEFAULT_LANGUAGE);
validUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
assertThat(userRepository.findOneByLogin("test-register-valid").isPresent()).isFalse();
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(validUser)))
.andExpect(status().isCreated());
assertThat(userRepository.findOneByLogin("test-register-valid").isPresent()).isTrue();
}
@Test
@Transactional
public void testRegisterInvalidLogin() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("funky-log!n");// <-- invalid
invalidUser.setPassword("password");
invalidUser.setFirstName("Funky");
invalidUser.setLastName("One");
invalidUser.setEmail("[email protected]");
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByEmailIgnoreCase("[email protected]");
assertThat(user.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterInvalidEmail() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("bob");
invalidUser.setPassword("password");
invalidUser.setFirstName("Bob");
invalidUser.setLastName("Green");
invalidUser.setEmail("invalid");// <-- invalid
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByLogin("bob");
assertThat(user.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterInvalidPassword() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("bob");
invalidUser.setPassword("123");// password with only 3 digits
invalidUser.setFirstName("Bob");
invalidUser.setLastName("Green");
invalidUser.setEmail("[email protected]");
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByLogin("bob");
assertThat(user.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterNullPassword() throws Exception {
ManagedUserVM invalidUser = new ManagedUserVM();
invalidUser.setLogin("bob");
invalidUser.setPassword(null);// invalid null password
invalidUser.setFirstName("Bob");
invalidUser.setLastName("Green");
invalidUser.setEmail("[email protected]");
invalidUser.setActivated(true);
invalidUser.setImageUrl("http://placehold.it/50x50");
invalidUser.setLangKey(Constants.DEFAULT_LANGUAGE);
invalidUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(invalidUser)))
.andExpect(status().isBadRequest());
Optional<User> user = userRepository.findOneByLogin("bob");
assertThat(user.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterDuplicateLogin() throws Exception {
// First registration
ManagedUserVM firstUser = new ManagedUserVM();
firstUser.setLogin("alice");
firstUser.setPassword("password");
firstUser.setFirstName("Alice");
firstUser.setLastName("Something");
firstUser.setEmail("[email protected]");
firstUser.setImageUrl("http://placehold.it/50x50");
firstUser.setLangKey(Constants.DEFAULT_LANGUAGE);
firstUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// Duplicate login, different email
ManagedUserVM secondUser = new ManagedUserVM();
secondUser.setLogin(firstUser.getLogin());
secondUser.setPassword(firstUser.getPassword());
secondUser.setFirstName(firstUser.getFirstName());
secondUser.setLastName(firstUser.getLastName());
secondUser.setEmail("[email protected]");
secondUser.setImageUrl(firstUser.getImageUrl());
secondUser.setLangKey(firstUser.getLangKey());
secondUser.setCreatedBy(firstUser.getCreatedBy());
secondUser.setCreatedDate(firstUser.getCreatedDate());
secondUser.setLastModifiedBy(firstUser.getLastModifiedBy());
secondUser.setLastModifiedDate(firstUser.getLastModifiedDate());
secondUser.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
// First user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(firstUser)))
.andExpect(status().isCreated());
// Second (non activated) user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(secondUser)))
.andExpect(status().isCreated());
Optional<User> testUser = userRepository.findOneByEmailIgnoreCase("[email protected]");
assertThat(testUser.isPresent()).isTrue();
testUser.get().setActivated(true);
userRepository.save(testUser.get());
// Second (already activated) user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(secondUser)))
.andExpect(status().is4xxClientError());
}
@Test
@Transactional
public void testRegisterDuplicateEmail() throws Exception {
// First user
ManagedUserVM firstUser = new ManagedUserVM();
firstUser.setLogin("test-register-duplicate-email");
firstUser.setPassword("password");
firstUser.setFirstName("Alice");
firstUser.setLastName("Test");
firstUser.setEmail("[email protected]");
firstUser.setImageUrl("http://placehold.it/50x50");
firstUser.setLangKey(Constants.DEFAULT_LANGUAGE);
firstUser.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// Register first user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(firstUser)))
.andExpect(status().isCreated());
Optional<User> testUser1 = userRepository.findOneByLogin("test-register-duplicate-email");
assertThat(testUser1.isPresent()).isTrue();
// Duplicate email, different login
ManagedUserVM secondUser = new ManagedUserVM();
secondUser.setLogin("test-register-duplicate-email-2");
secondUser.setPassword(firstUser.getPassword());
secondUser.setFirstName(firstUser.getFirstName());
secondUser.setLastName(firstUser.getLastName());
secondUser.setEmail(firstUser.getEmail());
secondUser.setImageUrl(firstUser.getImageUrl());
secondUser.setLangKey(firstUser.getLangKey());
secondUser.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
// Register second (non activated) user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(secondUser)))
.andExpect(status().isCreated());
Optional<User> testUser2 = userRepository.findOneByLogin("test-register-duplicate-email");
assertThat(testUser2.isPresent()).isFalse();
Optional<User> testUser3 = userRepository.findOneByLogin("test-register-duplicate-email-2");
assertThat(testUser3.isPresent()).isTrue();
// Duplicate email - with uppercase email address
ManagedUserVM userWithUpperCaseEmail = new ManagedUserVM();
userWithUpperCaseEmail.setId(firstUser.getId());
userWithUpperCaseEmail.setLogin("test-register-duplicate-email-3");
userWithUpperCaseEmail.setPassword(firstUser.getPassword());
userWithUpperCaseEmail.setFirstName(firstUser.getFirstName());
userWithUpperCaseEmail.setLastName(firstUser.getLastName());
userWithUpperCaseEmail.setEmail("[email protected]");<|fim▁hole|>
// Register third (not activated) user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(userWithUpperCaseEmail)))
.andExpect(status().isCreated());
Optional<User> testUser4 = userRepository.findOneByLogin("test-register-duplicate-email-3");
assertThat(testUser4.isPresent()).isTrue();
assertThat(testUser4.get().getEmail()).isEqualTo("[email protected]");
testUser4.get().setActivated(true);
userService.updateUser((new UserDTO(testUser4.get())));
// Register 4th (already activated) user
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(secondUser)))
.andExpect(status().is4xxClientError());
}
@Test
@Transactional
public void testRegisterAdminIsIgnored() throws Exception {
ManagedUserVM validUser = new ManagedUserVM();
validUser.setLogin("badguy");
validUser.setPassword("password");
validUser.setFirstName("Bad");
validUser.setLastName("Guy");
validUser.setEmail("[email protected]");
validUser.setActivated(true);
validUser.setImageUrl("http://placehold.it/50x50");
validUser.setLangKey(Constants.DEFAULT_LANGUAGE);
validUser.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
restMvc.perform(
post("/api/register")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(validUser)))
.andExpect(status().isCreated());
Optional<User> userDup = userRepository.findOneByLogin("badguy");
assertThat(userDup.isPresent()).isTrue();
assertThat(userDup.get().getAuthorities()).hasSize(1)
.containsExactly(authorityRepository.findById(AuthoritiesConstants.USER).get());
}
@Test
@Transactional
public void testActivateAccount() throws Exception {
final String activationKey = "some activation key";
User user = new User();
user.setLogin("activate-account");
user.setEmail("[email protected]");
user.setPassword(RandomStringUtils.random(60));
user.setActivated(false);
user.setActivationKey(activationKey);
userRepository.saveAndFlush(user);
restMvc.perform(get("/api/activate?key={activationKey}", activationKey))
.andExpect(status().isOk());
user = userRepository.findOneByLogin(user.getLogin()).orElse(null);
assertThat(user.getActivated()).isTrue();
}
@Test
@Transactional
public void testActivateAccountWithWrongKey() throws Exception {
restMvc.perform(get("/api/activate?key=wrongActivationKey"))
.andExpect(status().isInternalServerError());
}
@Test
@Transactional
@WithMockUser("save-account")
public void testSaveAccount() throws Exception {
User user = new User();
user.setLogin("save-account");
user.setEmail("[email protected]");
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
userRepository.saveAndFlush(user);
UserDTO userDTO = new UserDTO();
userDTO.setLogin("not-used");
userDTO.setFirstName("firstname");
userDTO.setLastName("lastname");
userDTO.setEmail("[email protected]");
userDTO.setActivated(false);
userDTO.setImageUrl("http://placehold.it/50x50");
userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
restMvc.perform(
post("/api/account")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(userDTO)))
.andExpect(status().isOk());
User updatedUser = userRepository.findOneByLogin(user.getLogin()).orElse(null);
assertThat(updatedUser.getFirstName()).isEqualTo(userDTO.getFirstName());
assertThat(updatedUser.getLastName()).isEqualTo(userDTO.getLastName());
assertThat(updatedUser.getEmail()).isEqualTo(userDTO.getEmail());
assertThat(updatedUser.getLangKey()).isEqualTo(userDTO.getLangKey());
assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
assertThat(updatedUser.getImageUrl()).isEqualTo(userDTO.getImageUrl());
assertThat(updatedUser.getActivated()).isEqualTo(true);
assertThat(updatedUser.getAuthorities()).isEmpty();
}
@Test
@Transactional
@WithMockUser("save-invalid-email")
public void testSaveInvalidEmail() throws Exception {
User user = new User();
user.setLogin("save-invalid-email");
user.setEmail("[email protected]");
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
userRepository.saveAndFlush(user);
UserDTO userDTO = new UserDTO();
userDTO.setLogin("not-used");
userDTO.setFirstName("firstname");
userDTO.setLastName("lastname");
userDTO.setEmail("invalid email");
userDTO.setActivated(false);
userDTO.setImageUrl("http://placehold.it/50x50");
userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
restMvc.perform(
post("/api/account")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(userDTO)))
.andExpect(status().isBadRequest());
assertThat(userRepository.findOneByEmailIgnoreCase("invalid email")).isNotPresent();
}
@Test
@Transactional
@WithMockUser("save-existing-email")
public void testSaveExistingEmail() throws Exception {
User user = new User();
user.setLogin("save-existing-email");
user.setEmail("[email protected]");
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
userRepository.saveAndFlush(user);
User anotherUser = new User();
anotherUser.setLogin("save-existing-email2");
anotherUser.setEmail("[email protected]");
anotherUser.setPassword(RandomStringUtils.random(60));
anotherUser.setActivated(true);
userRepository.saveAndFlush(anotherUser);
UserDTO userDTO = new UserDTO();
userDTO.setLogin("not-used");
userDTO.setFirstName("firstname");
userDTO.setLastName("lastname");
userDTO.setEmail("[email protected]");
userDTO.setActivated(false);
userDTO.setImageUrl("http://placehold.it/50x50");
userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
restMvc.perform(
post("/api/account")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(userDTO)))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin("save-existing-email").orElse(null);
assertThat(updatedUser.getEmail()).isEqualTo("[email protected]");
}
@Test
@Transactional
@WithMockUser("save-existing-email-and-login")
public void testSaveExistingEmailAndLogin() throws Exception {
User user = new User();
user.setLogin("save-existing-email-and-login");
user.setEmail("[email protected]");
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
userRepository.saveAndFlush(user);
UserDTO userDTO = new UserDTO();
userDTO.setLogin("not-used");
userDTO.setFirstName("firstname");
userDTO.setLastName("lastname");
userDTO.setEmail("[email protected]");
userDTO.setActivated(false);
userDTO.setImageUrl("http://placehold.it/50x50");
userDTO.setLangKey(Constants.DEFAULT_LANGUAGE);
userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.ADMIN));
restMvc.perform(
post("/api/account")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(userDTO)))
.andExpect(status().isOk());
User updatedUser = userRepository.findOneByLogin("save-existing-email-and-login").orElse(null);
assertThat(updatedUser.getEmail()).isEqualTo("[email protected]");
}
@Test
@Transactional
@WithMockUser("change-password-wrong-existing-password")
public void testChangePasswordWrongExistingPassword() throws Exception {
User user = new User();
String currentPassword = RandomStringUtils.random(60);
user.setPassword(passwordEncoder.encode(currentPassword));
user.setLogin("change-password-wrong-existing-password");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
restMvc.perform(post("/api/account/change-password")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO("1"+currentPassword, "new password"))))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin("change-password-wrong-existing-password").orElse(null);
assertThat(passwordEncoder.matches("new password", updatedUser.getPassword())).isFalse();
assertThat(passwordEncoder.matches(currentPassword, updatedUser.getPassword())).isTrue();
}
@Test
@Transactional
@WithMockUser("change-password")
public void testChangePassword() throws Exception {
User user = new User();
String currentPassword = RandomStringUtils.random(60);
user.setPassword(passwordEncoder.encode(currentPassword));
user.setLogin("change-password");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
restMvc.perform(post("/api/account/change-password")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, "new password"))))
.andExpect(status().isOk());
User updatedUser = userRepository.findOneByLogin("change-password").orElse(null);
assertThat(passwordEncoder.matches("new password", updatedUser.getPassword())).isTrue();
}
@Test
@Transactional
@WithMockUser("change-password-too-small")
public void testChangePasswordTooSmall() throws Exception {
User user = new User();
String currentPassword = RandomStringUtils.random(60);
user.setPassword(passwordEncoder.encode(currentPassword));
user.setLogin("change-password-too-small");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
String newPassword = RandomStringUtils.random(ManagedUserVM.PASSWORD_MIN_LENGTH - 1);
restMvc.perform(post("/api/account/change-password")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, newPassword))))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin("change-password-too-small").orElse(null);
assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
}
@Test
@Transactional
@WithMockUser("change-password-too-long")
public void testChangePasswordTooLong() throws Exception {
User user = new User();
String currentPassword = RandomStringUtils.random(60);
user.setPassword(passwordEncoder.encode(currentPassword));
user.setLogin("change-password-too-long");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
String newPassword = RandomStringUtils.random(ManagedUserVM.PASSWORD_MAX_LENGTH + 1);
restMvc.perform(post("/api/account/change-password")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, newPassword))))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin("change-password-too-long").orElse(null);
assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
}
@Test
@Transactional
@WithMockUser("change-password-empty")
public void testChangePasswordEmpty() throws Exception {
User user = new User();
String currentPassword = RandomStringUtils.random(60);
user.setPassword(passwordEncoder.encode(currentPassword));
user.setLogin("change-password-empty");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
restMvc.perform(post("/api/account/change-password")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(new PasswordChangeDTO(currentPassword, ""))))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin("change-password-empty").orElse(null);
assertThat(updatedUser.getPassword()).isEqualTo(user.getPassword());
}
@Test
@Transactional
public void testRequestPasswordReset() throws Exception {
User user = new User();
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
user.setLogin("password-reset");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
restMvc.perform(post("/api/account/reset-password/init")
.content("[email protected]"))
.andExpect(status().isOk());
}
@Test
@Transactional
public void testRequestPasswordResetUpperCaseEmail() throws Exception {
User user = new User();
user.setPassword(RandomStringUtils.random(60));
user.setActivated(true);
user.setLogin("password-reset");
user.setEmail("[email protected]");
userRepository.saveAndFlush(user);
restMvc.perform(post("/api/account/reset-password/init")
.content("[email protected]"))
.andExpect(status().isOk());
}
@Test
public void testRequestPasswordResetWrongEmail() throws Exception {
restMvc.perform(
post("/api/account/reset-password/init")
.content("[email protected]"))
.andExpect(status().isBadRequest());
}
@Test
@Transactional
public void testFinishPasswordReset() throws Exception {
User user = new User();
user.setPassword(RandomStringUtils.random(60));
user.setLogin("finish-password-reset");
user.setEmail("[email protected]");
user.setResetDate(Instant.now().plusSeconds(60));
user.setResetKey("reset key");
userRepository.saveAndFlush(user);
KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
keyAndPassword.setKey(user.getResetKey());
keyAndPassword.setNewPassword("new password");
restMvc.perform(
post("/api/account/reset-password/finish")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(keyAndPassword)))
.andExpect(status().isOk());
User updatedUser = userRepository.findOneByLogin(user.getLogin()).orElse(null);
assertThat(passwordEncoder.matches(keyAndPassword.getNewPassword(), updatedUser.getPassword())).isTrue();
}
@Test
@Transactional
public void testFinishPasswordResetTooSmall() throws Exception {
User user = new User();
user.setPassword(RandomStringUtils.random(60));
user.setLogin("finish-password-reset-too-small");
user.setEmail("[email protected]");
user.setResetDate(Instant.now().plusSeconds(60));
user.setResetKey("reset key too small");
userRepository.saveAndFlush(user);
KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
keyAndPassword.setKey(user.getResetKey());
keyAndPassword.setNewPassword("foo");
restMvc.perform(
post("/api/account/reset-password/finish")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(keyAndPassword)))
.andExpect(status().isBadRequest());
User updatedUser = userRepository.findOneByLogin(user.getLogin()).orElse(null);
assertThat(passwordEncoder.matches(keyAndPassword.getNewPassword(), updatedUser.getPassword())).isFalse();
}
@Test
@Transactional
public void testFinishPasswordResetWrongKey() throws Exception {
KeyAndPasswordVM keyAndPassword = new KeyAndPasswordVM();
keyAndPassword.setKey("wrong reset key");
keyAndPassword.setNewPassword("new password");
restMvc.perform(
post("/api/account/reset-password/finish")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(keyAndPassword)))
.andExpect(status().isInternalServerError());
}
}<|fim▁end|>
|
userWithUpperCaseEmail.setImageUrl(firstUser.getImageUrl());
userWithUpperCaseEmail.setLangKey(firstUser.getLangKey());
userWithUpperCaseEmail.setAuthorities(new HashSet<>(firstUser.getAuthorities()));
|
<|file_name|>TileElement.cpp<|end_file_name|><|fim▁begin|>/*****************************************************************************
* Copyright (c) 2014-2019 OpenRCT2 developers
*
* For a complete list of all authors, please refer to contributors.md
* Interested in contributing? Visit https://github.com/OpenRCT2/OpenRCT2
*
* OpenRCT2 is licensed under the GNU General Public License version 3.
*****************************************************************************/
#include "TileElement.h"
#include "../core/Guard.hpp"
#include "../interface/Window.h"
#include "../localisation/Localisation.h"
#include "../ride/Track.h"
#include "Banner.h"
#include "LargeScenery.h"
#include "Scenery.h"
uint8_t TileElementBase::GetType() const
{
return this->type & TILE_ELEMENT_TYPE_MASK;
}
void TileElementBase::SetType(uint8_t newType)
{
this->type &= ~TILE_ELEMENT_TYPE_MASK;
this->type |= (newType & TILE_ELEMENT_TYPE_MASK);
}
Direction TileElementBase::GetDirection() const
{
return this->type & TILE_ELEMENT_DIRECTION_MASK;
}
void TileElementBase::SetDirection(Direction direction)
{
this->type &= ~TILE_ELEMENT_DIRECTION_MASK;
this->type |= (direction & TILE_ELEMENT_DIRECTION_MASK);
}
Direction TileElementBase::GetDirectionWithOffset(uint8_t offset) const
{
return ((this->type & TILE_ELEMENT_DIRECTION_MASK) + offset) & TILE_ELEMENT_DIRECTION_MASK;
}
bool TileElementBase::IsLastForTile() const
{
return (this->flags & TILE_ELEMENT_FLAG_LAST_TILE) != 0;
}
void TileElementBase::SetLastForTile(bool on)
{
if (on)
flags |= TILE_ELEMENT_FLAG_LAST_TILE;
else
flags &= ~TILE_ELEMENT_FLAG_LAST_TILE;
}
bool TileElementBase::IsGhost() const
{
return (this->flags & TILE_ELEMENT_FLAG_GHOST) != 0;
}
void TileElementBase::SetGhost(bool isGhost)
{
if (isGhost)
{
this->flags |= TILE_ELEMENT_FLAG_GHOST;
}
else
{
this->flags &= ~TILE_ELEMENT_FLAG_GHOST;
}
}
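// Walks the elements that follow this one on the tile: if the surface element comes later in the list, this element lies below ground.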
bool tile_element_is_underground(TileElement* tileElement)
{
do
{
tileElement++;
if ((tileElement - 1)->IsLastForTile())
return false;
} while (tileElement->GetType() != TILE_ELEMENT_TYPE_SURFACE);
return true;
}
BannerIndex tile_element_get_banner_index(TileElement* tileElement)
{
rct_scenery_entry* sceneryEntry;
switch (tileElement->GetType())
{
case TILE_ELEMENT_TYPE_LARGE_SCENERY:
sceneryEntry = tileElement->AsLargeScenery()->GetEntry();
if (sceneryEntry->large_scenery.scrolling_mode == SCROLLING_MODE_NONE)
return BANNER_INDEX_NULL;
return tileElement->AsLargeScenery()->GetBannerIndex();
case TILE_ELEMENT_TYPE_WALL:
sceneryEntry = tileElement->AsWall()->GetEntry();
if (sceneryEntry == nullptr || sceneryEntry->wall.scrolling_mode == SCROLLING_MODE_NONE)
return BANNER_INDEX_NULL;
return tileElement->AsWall()->GetBannerIndex();
case TILE_ELEMENT_TYPE_BANNER:
return tileElement->AsBanner()->GetIndex();
default:
return BANNER_INDEX_NULL;
}
}
void tile_element_set_banner_index(TileElement* tileElement, BannerIndex bannerIndex)
{
switch (tileElement->GetType())
{
case TILE_ELEMENT_TYPE_WALL:
tileElement->AsWall()->SetBannerIndex(bannerIndex);
break;
case TILE_ELEMENT_TYPE_LARGE_SCENERY:
tileElement->AsLargeScenery()->SetBannerIndex(bannerIndex);
break;
case TILE_ELEMENT_TYPE_BANNER:
tileElement->AsBanner()->SetIndex(bannerIndex);
break;
default:
log_error("Tried to set banner index on unsuitable tile element!");<|fim▁hole|>
void tile_element_remove_banner_entry(TileElement* tileElement)
{
auto bannerIndex = tile_element_get_banner_index(tileElement);
auto banner = GetBanner(bannerIndex);
if (banner != nullptr)
{
window_close_by_number(WC_BANNER, bannerIndex);
*banner = {};
}
}
uint8_t tile_element_get_ride_index(const TileElement* tileElement)
{
switch (tileElement->GetType())
{
case TILE_ELEMENT_TYPE_TRACK:
return tileElement->AsTrack()->GetRideIndex();
case TILE_ELEMENT_TYPE_ENTRANCE:
return tileElement->AsEntrance()->GetRideIndex();
case TILE_ELEMENT_TYPE_PATH:
return tileElement->AsPath()->GetRideIndex();
default:
return RIDE_ID_NULL;
}
}
void TileElement::ClearAs(uint8_t newType)
{
type = newType;
flags = 0;
base_height = 2;
clearance_height = 2;
std::fill_n(pad_04, sizeof(pad_04), 0x00);
std::fill_n(pad_08, sizeof(pad_08), 0x00);
}
void TileElementBase::Remove()
{
tile_element_remove((TileElement*)this);
}
// Rotate both packed quarter values by `amount`.
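// _val packs two 4-bit quarter masks, one per nibble; each rotation step shifts both nibbles left and wraps a nibble's overflow back into its own lowest bit.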
const QuarterTile QuarterTile::Rotate(uint8_t amount) const
{
switch (amount)
{
case 0:
return QuarterTile{ *this };
case 1:
{
auto rotVal1 = _val << 1;
auto rotVal2 = rotVal1 >> 4;
// Clear the bit from the tileQuarter
rotVal1 &= 0b11101110;
// Clear the bit from the zQuarter
rotVal2 &= 0b00010001;
return QuarterTile{ static_cast<uint8_t>(rotVal1 | rotVal2) };
}
case 2:
{
auto rotVal1 = _val << 2;
auto rotVal2 = rotVal1 >> 4;
// Clear the bit from the tileQuarter
rotVal1 &= 0b11001100;
// Clear the bit from the zQuarter
rotVal2 &= 0b00110011;
return QuarterTile{ static_cast<uint8_t>(rotVal1 | rotVal2) };
}
case 3:
{
auto rotVal1 = _val << 3;
auto rotVal2 = rotVal1 >> 4;
// Clear the bit from the tileQuarter
rotVal1 &= 0b10001000;
// Clear the bit from the zQuarter
rotVal2 &= 0b01110111;
return QuarterTile{ static_cast<uint8_t>(rotVal1 | rotVal2) };
}
default:
log_error("Tried to rotate QuarterTile invalid amount.");
return QuarterTile{ 0 };
}
}
uint8_t TileElementBase::GetOccupiedQuadrants() const
{
return flags & TILE_ELEMENT_OCCUPIED_QUADRANTS_MASK;
}
void TileElementBase::SetOccupiedQuadrants(uint8_t quadrants)
{
flags &= ~TILE_ELEMENT_OCCUPIED_QUADRANTS_MASK;
flags |= (quadrants & TILE_ELEMENT_OCCUPIED_QUADRANTS_MASK);
}
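// Heights are stored in steps of 8 z-units; these helpers convert to and from absolute z coordinates.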
int32_t TileElementBase::GetBaseZ() const
{
return base_height * 8;
}
void TileElementBase::SetBaseZ(int32_t newZ)
{
base_height = (newZ / 8);
}
int32_t TileElementBase::GetClearanceZ() const
{
return clearance_height * 8;
}
void TileElementBase::SetClearanceZ(int32_t newZ)
{
clearance_height = (newZ / 8);
}<|fim▁end|>
|
Guard::Assert(false);
}
}
|
<|file_name|>unique-decl.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>
// pretty-expanded FIXME #23616
pub fn main() {
let _: Box<isize>;
}
fn f(_i: Box<isize>) -> Box<isize> {
panic!();
}<|fim▁end|>
| |
<|file_name|>parityBackground.js<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
class ParityBackground extends Component {
static propTypes = {
style: PropTypes.object.isRequired,
children: PropTypes.node,
className: PropTypes.string,
onClick: PropTypes.func
};
render () {
const { children, className, style, onClick } = this.props;
return (
<div
className={ className }
style={ style }
onTouchTap={ onClick }>
{ children }
</div>
);
}
}
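// Memoized selector factory: the returned selector rebuilds the background style only when the effective seed (prop override or stored setting) changes.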
function mapStateToProps (_, initProps) {
const { gradient, seed, muiTheme } = initProps;
let _seed = seed;
let _props = { style: muiTheme.parity.getBackgroundStyle(gradient, seed) };
return (state, props) => {
const { backgroundSeed } = state.settings;
const { seed } = props;
const newSeed = seed || backgroundSeed;
if (newSeed === _seed) {
return _props;
}
_seed = newSeed;
_props = { style: muiTheme.parity.getBackgroundStyle(gradient, newSeed) };<|fim▁hole|>
export default connect(
mapStateToProps
)(ParityBackground);<|fim▁end|>
|
return _props;
};
}
|
<|file_name|>rpc.py<|end_file_name|><|fim▁begin|># Copyright 2011 Jeff Garzik
#
# RawProxy has the following improvements over python-jsonrpc's ServiceProxy
# class:
#
# - HTTP connections persist for the life of the RawProxy object (if server
# supports HTTP/1.1)
# - sends protocol 'version', per JSON-RPC 1.1
# - sends proper, incrementing 'id'
# - sends Basic HTTP authentication headers
# - parses all JSON numbers that look like floats as Decimal
# - uses standard Python json lib
#
# Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
#
# Copyright (c) 2007 Jan-Klaas Kollhof
#
# This file is part of jsonrpc.
#
# jsonrpc is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Bitcoin Core RPC support"""
from __future__ import absolute_import, division, print_function, unicode_literals
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import binascii
import decimal
import json
import os
import platform
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
import bitcoin
from bitcoin.core import COIN, lx, b2lx, CBlock, CTransaction, COutPoint, CTxOut
from bitcoin.core.script import CScript
from bitcoin.wallet import CBitcoinAddress
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
# (un)hexlify to/from unicode, needed for Python3
unhexlify = binascii.unhexlify
hexlify = binascii.hexlify
if sys.version > '3':
unhexlify = lambda h: binascii.unhexlify(h.encode('utf8'))
hexlify = lambda b: binascii.hexlify(b).decode('utf8')
class JSONRPCException(Exception):
def __init__(self, rpc_error):
super(JSONRPCException, self).__init__('msg: %r code: %r' %
(rpc_error['message'], rpc_error['code']))
self.error = rpc_error
class RawProxy(object):
# FIXME: need a CChainParams rather than hard-coded service_port
def __init__(self, service_url=None,
service_port=None,
btc_conf_file=None,
timeout=HTTP_TIMEOUT,
_connection=None):
"""Low-level JSON-RPC proxy
Unlike Proxy, no conversion is done from the raw JSON objects.
"""
if service_url is None:
# Figure out the path to the bitcoin.conf file
if btc_conf_file is None:
if platform.system() == 'Darwin':
btc_conf_file = os.path.expanduser('~/Library/Application Support/Bitcoin/')
elif platform.system() == 'Windows':
btc_conf_file = os.path.join(os.environ['APPDATA'], 'Bitcoin')
else:
btc_conf_file = os.path.expanduser('~/.bitcoin')
btc_conf_file = os.path.join(btc_conf_file, 'bitcoin.conf')
# Extract contents of bitcoin.conf to build service_url
with open(btc_conf_file, 'r') as fd:
conf = {}
for line in fd.readlines():
if '#' in line:
line = line[:line.index('#')]
if '=' not in line:
continue
k, v = line.split('=', 1)
conf[k.strip()] = v.strip()
if service_port is None:
service_port = bitcoin.params.RPC_PORT
conf['rpcport'] = int(conf.get('rpcport', service_port))
conf['rpcssl'] = conf.get('rpcssl', '0')
if conf['rpcssl'].lower() in ('0', 'false'):
conf['rpcssl'] = False
elif conf['rpcssl'].lower() in ('1', 'true'):
conf['rpcssl'] = True
else:
raise ValueError('Unknown rpcssl value %r' % conf['rpcssl'])
service_url = ('%s://%s:%s@localhost:%d' %
('https' if conf['rpcssl'] else 'http',
conf['rpcuser'], conf['rpcpassword'],
conf['rpcport']))
self.__service_url = service_url
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
self.__id_count = 0
authpair = "%s:%s" % (self.__url.username, self.__url.password)
authpair = authpair.encode('utf8')
self.__auth_header = b"Basic " + base64.b64encode(authpair)
if _connection:
# Callables re-use the connection of the original proxy
self.__conn = _connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port=port,
key_file=None, cert_file=None,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port=port,
timeout=timeout)
def _call(self, service_name, *args):
self.__id_count += 1
postdata = json.dumps({'version': '1.1',
'method': service_name,
'params': args,
'id': self.__id_count})
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
response = self._get_response()
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
# Create a callable to do the actual call
f = lambda *args: self._call(name, *args)
# Make debuggers show <function bitcoin.rpc.name> rather than <function
# bitcoin.rpc.<lambda>>
f.__name__ = name
return f
def _batch(self, rpc_call_list):
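"""Send a list of raw JSON-RPC call dicts in a single HTTP request and return the list of responses."""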
postdata = json.dumps(list(rpc_call_list))
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
return self._get_response()
def _get_response(self):
http_response = self.__conn.getresponse()
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
return json.loads(http_response.read().decode('utf8'),
parse_float=decimal.Decimal)
class Proxy(RawProxy):
def __init__(self, service_url=None,
service_port=None,
btc_conf_file=None,
timeout=HTTP_TIMEOUT,
**kwargs):
"""Create a proxy to a bitcoin RPC service
Unlike RawProxy, data is passed as objects rather than JSON. (not yet
fully implemented) Assumes Bitcoin Core version >= 0.9; older versions
mostly work, but there are a few incompatibilities.
If service_url is not specified the username and password are read out
of the file btc_conf_file. If btc_conf_file is not specified
~/.bitcoin/bitcoin.conf or equivalent is used by default. The default
port is set according to the chain parameters in use: mainnet, testnet,
or regtest.
Usually no arguments to Proxy() are needed; the local bitcoind will be
used.
timeout - timeout in seconds before the HTTP interface times out
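Example (illustrative sketch; assumes a reachable local bitcoind whose
credentials can be read from bitcoin.conf):
proxy = Proxy()
print(proxy.getinfo()['blocks'])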
"""
super(Proxy, self).__init__(service_url=service_url, service_port=service_port, btc_conf_file=btc_conf_file,
timeout=timeout,
**kwargs)
def getaccountaddress(self, account=None):
"""Return the current Bitcoin address for receiving payments to this account."""
r = self._call('getaccountaddress', account)
return CBitcoinAddress(r)
def getbalance(self, account='*', minconf=1):
"""Get the balance
account - The selected account. Defaults to "*" for entire wallet. It may be the default account using "".
minconf - Only include transactions confirmed at least this many times. (default=1)
"""
r = self._call('getbalance', account, minconf)
return int(r*COIN)
def getblock(self, block_hash):
"""Get block <block_hash>
Raises IndexError if block_hash is not valid.
"""
try:
block_hash = b2lx(block_hash)
except TypeError:
raise TypeError('%s.getblock(): block_hash must be bytes; got %r instance' %<|fim▁hole|> except JSONRPCException as ex:
raise IndexError('%s.getblock(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
return CBlock.deserialize(unhexlify(r))
def getblockhash(self, height):
"""Return hash of block in best-block-chain at height.
Raises IndexError if height is not valid.
"""
try:
return lx(self._call('getblockhash', height))
except JSONRPCException as ex:
raise IndexError('%s.getblockhash(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
def getinfo(self):
"""Return an object containing various state info"""
r = self._call('getinfo')
r['balance'] = int(r['balance'] * COIN)
r['paytxfee'] = int(r['paytxfee'] * COIN)
return r
def getnewaddress(self, account=None):
"""Return a new Bitcoin address for receiving payments.
If account is not None, it is added to the address book so payments
received with the address will be credited to account.
"""
r = None
if account is not None:
r = self._call('getnewaddress', account)
else:
r = self._call('getnewaddress')
return CBitcoinAddress(r)
def getrawchangeaddress(self):
"""Returns a new Bitcoin address, for receiving change.
This is for use with raw transactions, NOT normal use.
"""
r = self._call('getrawchangeaddress')
return CBitcoinAddress(r)
def getrawmempool(self, verbose=False):
"""Return the mempool"""
if verbose:
return self._call('getrawmempool', verbose)
else:
r = self._call('getrawmempool')
r = [lx(txid) for txid in r]
return r
def getrawtransaction(self, txid, verbose=False):
"""Return transaction with hash txid
Raises IndexError if transaction not found.
verbse - If true a dict is returned instead with additional information
on the transaction.
Note that if all txouts are spent and the transaction index is not
enabled the transaction may not be available.
"""
try:
r = self._call('getrawtransaction', b2lx(txid), 1 if verbose else 0)
except JSONRPCException as ex:
raise IndexError('%s.getrawtransaction(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
if verbose:
r['tx'] = CTransaction.deserialize(unhexlify(r['hex']))
del r['hex']
del r['txid']
del r['version']
del r['locktime']
del r['vin']
del r['vout']
r['blockhash'] = lx(r['blockhash']) if 'blockhash' in r else None
else:
r = CTransaction.deserialize(unhexlify(r))
return r
def gettransaction(self, txid):
"""Get detailed information about in-wallet transaction txid
Raises IndexError if transaction not found in the wallet.
FIXME: Returned data types are not yet converted.
"""
try:
r = self._call('gettransaction', b2lx(txid))
except JSONRPCException as ex:
raise IndexError('%s.getrawtransaction(): %s (%d)' %
(self.__class__.__name__, ex.error['message'], ex.error['code']))
return r
def gettxout(self, outpoint, includemempool=True):
"""Return details about an unspent transaction output.
Raises IndexError if outpoint is not found or was spent.
includemempool - Include mempool txouts
"""
r = self._call('gettxout', b2lx(outpoint.hash), outpoint.n, includemempool)
if r is None:
raise IndexError('%s.gettxout(): unspent txout %r not found' % (self.__class__.__name__, outpoint))
r['txout'] = CTxOut(int(r['value'] * COIN),
CScript(unhexlify(r['scriptPubKey']['hex'])))
del r['value']
del r['scriptPubKey']
r['bestblock'] = lx(r['bestblock'])
return r
def listunspent(self, minconf=0, maxconf=9999999, addrs=None):
"""Return unspent transaction outputs in wallet
Outputs will have between minconf and maxconf (inclusive)
confirmations, optionally filtered to only include txouts paid to
addresses in addrs.
"""
r = None
if addrs is None:
r = self._call('listunspent', minconf, maxconf)
else:
addrs = [str(addr) for addr in addrs]
r = self._call('listunspent', minconf, maxconf, addrs)
r2 = []
for unspent in r:
unspent['outpoint'] = COutPoint(lx(unspent['txid']), unspent['vout'])
del unspent['txid']
del unspent['vout']
unspent['address'] = CBitcoinAddress(unspent['address'])
unspent['scriptPubKey'] = CScript(unhexlify(unspent['scriptPubKey']))
unspent['amount'] = int(unspent['amount'] * COIN)
r2.append(unspent)
return r2
def lockunspent(self, unlock, outpoints):
"""Lock or unlock outpoints"""
json_outpoints = [{'txid':b2lx(outpoint.hash),'vout':outpoint.n} for outpoint in outpoints]
return self._call('lockunspent', unlock, json_outpoints)
def sendrawtransaction(self, tx):
"""Submit transaction to local node and network."""
hextx = hexlify(tx.serialize())
r = self._call('sendrawtransaction', hextx)
return lx(r)
def sendtoaddress(self, addr, amount):
"""Sent amount to a given address"""
addr = str(addr)
amount = float(amount)/COIN
r = self._call('sendtoaddress', addr, amount)
return lx(r)
def signrawtransaction(self, tx, *args):
"""Sign inputs for transaction
FIXME: implement options
"""
hextx = hexlify(tx.serialize())
r = self._call('signrawtransaction', hextx, *args)
r['tx'] = CTransaction.deserialize(unhexlify(r['hex']))
del r['hex']
return r
def submitblock(self, block, params=None):
"""Submit a new block to the network.
params is optional and is currently ignored by bitcoind. See
https://en.bitcoin.it/wiki/BIP_0022 for full specification.
"""
hexblock = hexlify(block.serialize())
if params is not None:
return self._call('submitblock', hexblock, params)
else:
return self._call('submitblock', hexblock)
def validateaddress(self, address):
"""Return information about an address"""
r = self._call('validateaddress', str(address))
r['address'] = CBitcoinAddress(r['address'])
if 'pubkey' in r:
r['pubkey'] = unhexlify(r['pubkey'])
return r<|fim▁end|>
|
(self.__class__.__name__, block_hash.__class__))
try:
r = self._call('getblock', block_hash, False)
|
<|file_name|>DiffCompare.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***********************IMPORTANT NMAP LICENSE TERMS************************
# * *
# * The Nmap Security Scanner is (C) 1996-2013 Insecure.Com LLC. Nmap is *
# * also a registered trademark of Insecure.Com LLC. This program is free *
# * software; you may redistribute and/or modify it under the terms of the *
# * GNU General Public License as published by the Free Software *
# * Foundation; Version 2 ("GPL"), BUT ONLY WITH ALL OF THE CLARIFICATIONS *
# * AND EXCEPTIONS DESCRIBED HEREIN. This guarantees your right to use, *
# * modify, and redistribute this software under certain conditions. If *
# * you wish to embed Nmap technology into proprietary software, we sell *
# * alternative licenses (contact [email protected]). Dozens of software *
# * vendors already license Nmap technology such as host discovery, port *
# * scanning, OS detection, version detection, and the Nmap Scripting *
# * Engine. *
# * *
# * Note that the GPL places important restrictions on "derivative works", *
# * yet it does not provide a detailed definition of that term. To avoid *
# * misunderstandings, we interpret that term as broadly as copyright law *
# * allows. For example, we consider an application to constitute a *
# * derivative work for the purpose of this license if it does any of the *
# * following with any software or content covered by this license *
# * ("Covered Software"): *
# * *
# * o Integrates source code from Covered Software. *
# * *
# * o Reads or includes copyrighted data files, such as Nmap's nmap-os-db *
# * or nmap-service-probes. *
# * *
# * o Is designed specifically to execute Covered Software and parse the *
# * results (as opposed to typical shell or execution-menu apps, which will *
# * execute anything you tell them to). *
# * *
# * o Includes Covered Software in a proprietary executable installer. The *
# * installers produced by InstallShield are an example of this. Including *
# * Nmap with other software in compressed or archival form does not *
# * trigger this provision, provided appropriate open source decompression *
# * or de-archiving software is widely available for no charge. For the *
# * purposes of this license, an installer is considered to include Covered *
# * Software even if it actually retrieves a copy of Covered Software from *
# * another source during runtime (such as by downloading it from the *
# * Internet). *
# * *
# * o Links (statically or dynamically) to a library which does any of the *
# * above. *
# * *
# * o Executes a helper program, module, or script to do any of the above. *
# * *
# * This list is not exclusive, but is meant to clarify our interpretation *
# * of derived works with some common examples. Other people may interpret *
# * the plain GPL differently, so we consider this a special exception to *
# * the GPL that we apply to Covered Software. Works which meet any of *
# * these conditions must conform to all of the terms of this license, *
# * particularly including the GPL Section 3 requirements of providing *
# * source code and allowing free redistribution of the work as a whole. *
# * *
# * As another special exception to the GPL terms, Insecure.Com LLC grants *
# * permission to link the code of this program with any version of the *
# * OpenSSL library which is distributed under a license identical to that *
# * listed in the included docs/licenses/OpenSSL.txt file, and distribute *
# * linked combinations including the two. *
# * *
# * Any redistribution of Covered Software, including any derived works, *
# * must obey and carry forward all of the terms of this license, including *
# * obeying all GPL rules and restrictions. For example, source code of *
# * the whole work must be provided and free redistribution must be *
# * allowed. All GPL references to "this License", are to be treated as *
# * including the terms and conditions of this license text as well. *
# * *
# * Because this license imposes special exceptions to the GPL, Covered *
# * Work may not be combined (even as part of a larger work) with plain GPL *
# * software. The terms, conditions, and exceptions of this license must *
# * be included as well. This license is incompatible with some other open *
# * source licenses as well. In some cases we can relicense portions of *
# * Nmap or grant special permissions to use it in other open source *
# * software. Please contact [email protected] with any such requests. *
# * Similarly, we don't incorporate incompatible open source software into *
# * Covered Software without special permission from the copyright holders. *
# * *
# * If you have any questions about the licensing restrictions on using *
# * Nmap in other works, are happy to help. As mentioned above, we also *
# * offer alternative license to integrate Nmap into proprietary *
# * applications and appliances. These contracts have been sold to dozens *
# * of software vendors, and generally include a perpetual license as well *
# * as providing for priority support and updates. They also fund the *
# * continued development of Nmap. Please email [email protected] for further *
# * information. *
# * *
# * If you have received a written license agreement or contract for *
# * Covered Software stating terms other than these, you may choose to use *
# * and redistribute Covered Software under those terms instead of these. *
# * *
# * Source is provided to this software because we believe users have a *
# * right to know exactly what a program is going to do before they run it. *
# * This also allows you to audit the software for security holes (none *
# * have been found so far). *
# * *
# * Source code also allows you to port Nmap to new platforms, fix bugs, *
# * and add new features. You are highly encouraged to send your changes *
# * to the [email protected] mailing list for possible incorporation into the *
# * main distribution. By sending these changes to Fyodor or one of the *
# * Insecure.Org development mailing lists, or checking them into the Nmap *
# * source code repository, it is understood (unless you specify otherwise) *
# * that you are offering the Nmap Project (Insecure.Com LLC) the *
# * unlimited, non-exclusive right to reuse, modify, and relicense the *
# * code. Nmap will always be available Open Source, but this is important *
# * because the inability to relicense code has caused devastating problems *
# * for other Free Software projects (such as KDE and NASM). We also *
# * occasionally relicense the code to third parties as discussed above. *
# * If you wish to specify special license conditions of your *
# * contributions, just say so when you send them. *
# * *
# * This program is distributed in the hope that it will be useful, but *
# * WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Nmap *
# * license file for more details (it's in a COPYING file included with *
# * Nmap, and also available from https://svn.nmap.org/nmap/COPYING *
# * *
# ***************************************************************************/
import gobject
import gtk
import pango
import os
import os.path
import sys
import xml.sax
from zenmapGUI.higwidgets.higdialogs import HIGAlertDialog, HIGDialog
from zenmapGUI.higwidgets.higboxes import HIGVBox, HIGHBox, \
hig_box_space_holder
from zenmapGUI.higwidgets.higlabels import HIGSectionLabel
from zenmapGUI.higwidgets.higtables import HIGTable
from zenmapGUI.higwidgets.higbuttons import HIGButton
from zenmapCore.NmapParser import NmapParser
from zenmapCore.UmitLogging import log
import zenmapCore.I18N
import zenmapCore.Diff
from zenmapGUI.FileChoosers import ResultsFileSingleChooserDialog
# In milliseconds.
NDIFF_CHECK_TIMEOUT = 200
class ScanChooser(HIGVBox):
"""This class allows the selection of scan results from the list of open
tabs or from a file. It emits the "changed" signal when the scan selection
has changed."""
__gsignals__ = {
"changed": (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ())
}
def __init__(self, scans, title):
self.__gobject_init__()
self.title = title
self.scan_dict = {}
# Setting HIGVBox
self.set_border_width(5)
self.set_spacing(6)
self._create_widgets()
self._pack_hbox()
self._attaching_widgets()
self._set_scrolled()
self._set_text_view()
self._set_open_button()
for scan in scans:
self.add_scan(scan.scan_name or scan.get_nmap_command(), scan)
self.combo_scan.connect('changed', self.show_scan)
self.combo_scan.connect('changed', lambda x: self.emit('changed'))
self._pack_noexpand_nofill(self.lbl_scan)
self._pack_expand_fill(self.hbox)
def _create_widgets(self):
self.lbl_scan = HIGSectionLabel(self.title)
self.hbox = HIGHBox()
self.table = HIGTable()
self.list_scan = gtk.ListStore(str)
self.combo_scan = gtk.ComboBoxEntry(self.list_scan, 0)
self.btn_open_scan = gtk.Button(stock=gtk.STOCK_OPEN)
self.exp_scan = gtk.Expander(_("Scan Output"))
self.scrolled = gtk.ScrolledWindow()
self.txt_scan_result = gtk.TextView()
self.txg_tag = gtk.TextTag("scan_style")
def get_buffer(self):
return self.txt_scan_result.get_buffer()
def show_scan(self, widget):
nmap_output = self.get_nmap_output()
if nmap_output is not None:
self.txt_scan_result.get_buffer().set_text(nmap_output)
def normalize_output(self, output):
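# Replace literal '\\n' escape sequences in stored output with real newlines.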
return "\n".join(output.split("\\n"))
def _pack_hbox(self):
self.hbox._pack_noexpand_nofill(hig_box_space_holder())
self.hbox._pack_expand_fill(self.table)
def _attaching_widgets(self):
self.table.attach(self.combo_scan, 0, 1, 0, 1, yoptions=0)
self.table.attach(
self.btn_open_scan, 1, 2, 0, 1, yoptions=0, xoptions=0)
self.table.attach(self.exp_scan, 0, 2, 1, 2)
def _set_scrolled(self):
self.scrolled.set_border_width(5)
self.scrolled.set_size_request(-1, 130)
# Packing scrolled window into expander
self.exp_scan.add(self.scrolled)
# Packing text view into scrolled window
self.scrolled.add_with_viewport(self.txt_scan_result)
# Setting scrolled window
self.scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
def _set_text_view(self):
self.txg_table = self.txt_scan_result.get_buffer().get_tag_table()
self.txg_table.add(self.txg_tag)
self.txg_tag.set_property("family", "Monospace")
self.txt_scan_result.set_wrap_mode(gtk.WRAP_WORD)
self.txt_scan_result.set_editable(False)
self.txt_scan_result.get_buffer().connect(
"changed", self._text_changed_cb)
def _set_open_button(self):
self.btn_open_scan.connect('clicked', self.open_file)
def open_file(self, widget):
file_chooser = ResultsFileSingleChooserDialog(_("Select Scan Result"))
response = file_chooser.run()
file_chosen = file_chooser.get_filename()
file_chooser.destroy()
if response == gtk.RESPONSE_OK:
try:
parser = NmapParser()
parser.parse_file(file_chosen)
except xml.sax.SAXParseException, e:
alert = HIGAlertDialog(
message_format='<b>%s</b>' % _('Error parsing file'),
secondary_text=_(
"The file is not an Nmap XML output file. "
"The parsing error that occurred was\n%s") % str(e))
alert.run()
alert.destroy()
return False
except Exception, e:
alert = HIGAlertDialog(
message_format='<b>%s</b>' % _(
'Cannot open selected file'),
secondary_text=_("""\
This error occurred while trying to open the file:
%s""") % str(e))
alert.run()
alert.destroy()
return False
scan_name = os.path.split(file_chosen)[-1]
self.add_scan(scan_name, parser)
self.combo_scan.set_active(len(self.list_scan) - 1)
def add_scan(self, scan_name, parser):
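# Keep display names unique by appending a numeric suffix to duplicates.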
scan_id = 1
new_scan_name = scan_name
while new_scan_name in self.scan_dict.keys():
new_scan_name = "%s (%s)" % (scan_name, scan_id)
scan_id += 1
self.list_scan.append([new_scan_name])
self.scan_dict[new_scan_name] = parser
def _text_changed_cb(self, widget):
buff = self.txt_scan_result.get_buffer()
buff.apply_tag(
self.txg_tag, buff.get_start_iter(), buff.get_end_iter())
def get_parsed_scan(self):
"""Return the currently selected scan's parsed output as an NmapParser
object, or None if no valid scan is selected."""
selected_scan = self.combo_scan.child.get_text()
return self.scan_dict.get(selected_scan)
def get_nmap_output(self):
"""Return the currently selected scan's output as a string, or None if
no valid scan is selected."""
# Assumes NmapParser exposes the raw scan output via get_nmap_output().
parsed = self.parsed_scan
if parsed is None:
return None
return parsed.get_nmap_output()
nmap_output = property(get_nmap_output)
parsed_scan = property(get_parsed_scan)
class DiffWindow(gtk.Window):
def __init__(self, scans):
gtk.Window.__init__(self)
self.set_title(_("Compare Results"))
self.ndiff_process = None
# We allow the user to start a new diff before the old one has
# finished. We have to keep references to old processes until they
# finish to avoid problems when tearing down the Python interpreter at
# program exit.
self.old_processes = []
self.timer_id = None
self.main_vbox = HIGVBox()
self.diff_view = DiffView()
self.diff_view.set_size_request(-1, 100)
self.hbox_buttons = HIGHBox()
self.progress = gtk.ProgressBar()
self.btn_close = HIGButton(stock=gtk.STOCK_CLOSE)
self.hbox_selection = HIGHBox()
self.scan_chooser_a = ScanChooser(scans, _(u"A Scan"))
self.scan_chooser_b = ScanChooser(scans, _(u"B Scan"))
self._pack_widgets()
self._connect_widgets()
self.set_default_size(-1, 500)
# Initial Size Request
self.initial_size = self.get_size()
def _pack_widgets(self):
self.main_vbox.set_border_width(6)
self.hbox_selection.pack_start(self.scan_chooser_a, True, True)
self.hbox_selection.pack_start(self.scan_chooser_b, True, True)
self.main_vbox.pack_start(self.hbox_selection, False)
scroll = gtk.ScrolledWindow()
scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
scroll.add(self.diff_view)
self.main_vbox.pack_start(scroll, True, True)
self.progress.hide()
self.progress.set_no_show_all(True)
self.hbox_buttons.pack_start(self.progress, False)
self.hbox_buttons.pack_end(self.btn_close, False)
self.main_vbox._pack_noexpand_nofill(self.hbox_buttons)
self.add(self.main_vbox)
def _connect_widgets(self):
self.connect("delete-event", self.close)
self.btn_close.connect("clicked", self.close)
self.scan_chooser_a.connect('changed', self.refresh_diff)
self.scan_chooser_b.connect('changed', self.refresh_diff)
def refresh_diff(self, widget):
"""This method is called whenever the diff output might have changed,
such as when a different scan was selected in one of the choosers."""
log.debug("Refresh diff.")
if (self.ndiff_process is not None and
self.ndiff_process.poll() is None):
# Put this in the list of old processes we keep track of.
self.old_processes.append(self.ndiff_process)
self.ndiff_process = None
scan_a = self.scan_chooser_a.parsed_scan
scan_b = self.scan_chooser_b.parsed_scan
if scan_a is None or scan_b is None:
self.diff_view.clear()
else:
try:
self.ndiff_process = zenmapCore.Diff.ndiff(scan_a, scan_b)
except OSError, e:
alert = HIGAlertDialog(
message_format=_("Error running ndiff"),
secondary_text=_(
"There was an error running the ndiff program.\n\n"
) + str(e).decode(sys.getdefaultencoding(), "replace"))
alert.run()
alert.destroy()
else:
self.progress.show()
if self.timer_id is None:
self.timer_id = gobject.timeout_add(
NDIFF_CHECK_TIMEOUT, self.check_ndiff_process)
def check_ndiff_process(self):
"""Check if the ndiff subprocess is done and show the diff if it is.
Also remove any finished processes from the old process list."""
# Check if any old background processes have finished.
for p in self.old_processes[:]:
if p.poll() is not None:
p.close()
self.old_processes.remove(p)
if self.ndiff_process is not None:
# We're running the most recent scan. Check if it's done.
status = self.ndiff_process.poll()
if status is None:
# Keep calling this function on a timer until the process
# finishes.
self.progress.pulse()
return True
if status == 0 or status == 1:
# Successful completion.
try:
diff = self.ndiff_process.get_scan_diff()
except zenmapCore.Diff.NdiffParseException, e:
alert = HIGAlertDialog(
message_format=_("Error parsing ndiff output"),
secondary_text=str(e))
alert.run()
alert.destroy()
else:
self.diff_view.show_diff(diff)
else:
# Unsuccessful completion.
error_text = _(
"The ndiff process terminated with status code %d."
) % status
stderr = self.ndiff_process.stderr.read()
if len(stderr) > 0:
error_text += "\n\n" + stderr
alert = HIGAlertDialog(
message_format=_("Error running ndiff"),
secondary_text=error_text)
alert.run()
alert.destroy()
self.progress.hide()
self.ndiff_process.close()
self.ndiff_process = None
if len(self.old_processes) > 0:
# Keep calling this callback.
return True
else:
# All done.
self.timer_id = None
return False
def close(self, widget=None, extra=None):
self.destroy()
class DiffView(gtk.TextView):
REMOVE_COLOR = "#ffaaaa"
ADD_COLOR = "#ccffcc"
"""A widget displaying a zenmapCore.Diff.ScanDiff."""
def __init__(self):
gtk.TextView.__init__(self)
self.set_editable(False)
buff = self.get_buffer()
# Create text markup tags.
buff.create_tag("=", font="Monospace")
buff.create_tag(
"-", font="Monospace", background=self.REMOVE_COLOR)
buff.create_tag("+", font="Monospace", background=self.ADD_COLOR)
def clear(self):
self.get_buffer().set_text(u"")
<|fim▁hole|> def show_diff(self, diff):
self.clear()
buff = self.get_buffer()
for line in diff.splitlines(True):
if line.startswith("-"):
tags = ["-"]
elif line.startswith("+"):
tags = ["+"]
else:
tags = ["="]
buff.insert_with_tags_by_name(buff.get_end_iter(), line, *tags)
if __name__ == "__main__":
from zenmapCore.NmapParser import NmapParser
parsed1 = NmapParser()
parsed2 = NmapParser()
parsed3 = NmapParser()
parsed4 = NmapParser()
parsed1.parse_file("test/xml_test1.xml")
parsed2.parse_file("test/xml_test2.xml")
parsed3.parse_file("test/xml_test3.xml")
parsed4.parse_file("test/xml_test4.xml")
dw = DiffWindow({"Parsed 1": parsed1,
"Parsed 2": parsed2,
"Parsed 3": parsed3,
"Parsed 4": parsed4})
dw.show_all()
dw.connect("delete-event", lambda x, y: gtk.main_quit())
gtk.main()<|fim▁end|>
| |
<|file_name|>elastic.rs<|end_file_name|><|fim▁begin|>use super::ease::Easing;
use functions::util::*;
use num_traits::float::FloatConst;
/// This struct captures Elastic easing functions
#[derive(Debug)]
pub struct Elastic;
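// Penner-style elastic easing: `t` is elapsed time, `b` the start value, `c` the total change, and `d` the duration. Internally `p` is the period and `s` the phase offset of the damped sine wave.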
impl<F: Float + FloatConst> Easing<F> for Elastic {
fn ease_in(t: F, b: F, c: F, d: F) -> F {
cast_constants!(F; _2=2, _10=10);
if t == f(0.0) {
return b;
}
let t = t / d;
if t == f(1.0) {
return b + c;
}
let p = d * f(0.3);
let a = c;
let s = p / f(4.0);
let t = t - f(1.0);
let post_fix = a * _2.powf(_10 * t);
let temp = (t * d - s) * (_2 * F::PI()) / p;
-(post_fix * temp.sin()) + b
}
fn ease_out(t: F, b: F, c: F, d: F) -> F {
cast_constants!(F; _2=2, _10=10);
if t == f(0.0) {
return b;
}
let t = t / d;
if t == f(1.0) {
return b + c;
}
let p = d * f(0.3);
let a = c;
let s = p / f(4.0);
let temp = (t * d - s) * (_2 * F::PI()) / p;
a * _2.powf(-_10 * t) * temp.sin() + c + b
}
fn ease_in_out(t: F, b: F, c: F, d: F) -> F {
cast_constants!(F; _2=2, _10=10, _0_5=0.5);
if t == f(0.0) {
return b;
}
let t = t / (d / _2);
if t == _2 {
return b + c;
}
let p = d * f(0.3) * f(1.5);
let a = c;
let s = p / f(4.0);
if t < f(1.0) {
let t = t - f(1.0);
let post_fix = a * _2.powf(_10 * t);
let temp = (t * d - s) * (_2 * F::PI()) / p;
return -_0_5 * (post_fix * temp.sin()) + b;
}
let t = t - f(1.0);
let post_fix = a * _2.powf(-_10 * t);
let temp = (t * d - s) * (_2 * F::PI()) / p;
post_fix * temp.sin() * f(0.5) + c + b
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn ease_in() {
assert_relative_eq!(Elastic::ease_in(1.0_f32, 2.0, 3.0, 4.0), 1.983427);
assert_relative_eq!(Elastic::ease_in(0.0_f32, 1.0, 100.0, 100.0), 1.0000);
assert_relative_eq!(Elastic::ease_in(100.0_f32, 1.0, 100.0, 100.0), 101.000);
}
<|fim▁hole|> #[test]
fn ease_out() {
assert_relative_eq!(Elastic::ease_out(1.0_f32, 2.0, 3.0, 4.0), 4.734835);
assert_relative_eq!(Elastic::ease_out(0.0_f32, 1.0, 100.0, 100.0), 1.0000);
assert_relative_eq!(Elastic::ease_out(100.0_f32, 1.0, 100.0, 100.0), 101.000);
}
#[test]
fn ease_in_out() {
assert_relative_eq!(Elastic::ease_in_out(1.0_f32, 2.0, 3.0, 4.0), 2.035908);
assert_relative_eq!(Elastic::ease_in_out(0.0_f32, 1.0, 100.0, 100.0), 1.0000);
assert_relative_eq!(Elastic::ease_in_out(100.0_f32, 1.0, 100.0, 100.0), 101.0000);
assert_relative_eq!(Elastic::ease_in_out(51.0_f32, 1.0, 100.0, 100.0), 59.158646);
}
const PRECISE_RESULT: f64 = 1.9952083561735905;
#[test]
fn f32_precision() {
let ease32 = Elastic::ease_in(10_f32.sqrt(), 2.0, 3.0, 10.0);
assert_relative_ne!(ease32 as f64, PRECISE_RESULT); // f32 maths is actually happening
assert_relative_eq!(ease32, PRECISE_RESULT as f32);
}
#[test]
fn f64_precision() {
let ease64 = Elastic::ease_in(10_f64.sqrt(), 2.0, 3.0, 10.0);
assert_relative_eq!(ease64, PRECISE_RESULT);
}
}<|fim▁end|>
| |
<|file_name|>test_calibration.py<|end_file_name|><|fim▁begin|># Authors: Alexandre Gramfort <[email protected]>
# License: BSD 3 clause
import pytest
import numpy as np
from numpy.testing import assert_allclose
from scipy import sparse
from sklearn.base import BaseEstimator
from sklearn.dummy import DummyClassifier
from sklearn.model_selection import LeaveOneOut, train_test_split
from sklearn.utils._testing import (assert_array_almost_equal,
assert_almost_equal,
assert_array_equal,
ignore_warnings)
from sklearn.utils.extmath import softmax
from sklearn.exceptions import NotFittedError
from sklearn.datasets import make_classification, make_blobs
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import KFold, cross_val_predict
from sklearn.naive_bayes import MultinomialNB
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
from sklearn.svm import LinearSVC
from sklearn.isotonic import IsotonicRegression
from sklearn.feature_extraction import DictVectorizer
from sklearn.pipeline import Pipeline
from sklearn.impute import SimpleImputer
from sklearn.metrics import brier_score_loss
from sklearn.calibration import CalibratedClassifierCV, _CalibratedClassifier
from sklearn.calibration import _sigmoid_calibration, _SigmoidCalibration
from sklearn.calibration import calibration_curve
@pytest.fixture(scope="module")
def data():
X, y = make_classification(
n_samples=200, n_features=6, random_state=42
)
return X, y
@pytest.mark.parametrize('method', ['sigmoid', 'isotonic'])
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration(data, method, ensemble):
# Test calibration objects with isotonic and sigmoid
n_samples = 100
X, y = data
sample_weight = np.random.RandomState(seed=42).uniform(size=y.size)
X -= X.min() # MultinomialNB only allows positive X
# split train and test
X_train, y_train, sw_train = \
X[:n_samples], y[:n_samples], sample_weight[:n_samples]
X_test, y_test = X[n_samples:], y[n_samples:]
# Naive-Bayes
clf = MultinomialNB().fit(X_train, y_train, sample_weight=sw_train)
prob_pos_clf = clf.predict_proba(X_test)[:, 1]
cal_clf = CalibratedClassifierCV(clf, cv=y.size + 1, ensemble=ensemble)
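# Requesting more folds than there are samples is invalid, so fit must raise.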
with pytest.raises(ValueError):
cal_clf.fit(X, y)
# Naive Bayes with calibration
for this_X_train, this_X_test in [(X_train, X_test),
(sparse.csr_matrix(X_train),
sparse.csr_matrix(X_test))]:
cal_clf = CalibratedClassifierCV(
clf, method=method, cv=5, ensemble=ensemble
)
# Note that this fit overwrites the fit on the entire training
# set
cal_clf.fit(this_X_train, y_train, sample_weight=sw_train)
prob_pos_cal_clf = cal_clf.predict_proba(this_X_test)[:, 1]
# Check that brier score has improved after calibration
assert (brier_score_loss(y_test, prob_pos_clf) >
brier_score_loss(y_test, prob_pos_cal_clf))
# Check invariance against relabeling [0, 1] -> [1, 2]
cal_clf.fit(this_X_train, y_train + 1, sample_weight=sw_train)
prob_pos_cal_clf_relabeled = cal_clf.predict_proba(this_X_test)[:, 1]
assert_array_almost_equal(prob_pos_cal_clf,
prob_pos_cal_clf_relabeled)
# Check invariance against relabeling [0, 1] -> [-1, 1]
cal_clf.fit(this_X_train, 2 * y_train - 1, sample_weight=sw_train)
prob_pos_cal_clf_relabeled = cal_clf.predict_proba(this_X_test)[:, 1]
assert_array_almost_equal(prob_pos_cal_clf, prob_pos_cal_clf_relabeled)
# Check invariance against relabeling [0, 1] -> [1, 0]
cal_clf.fit(this_X_train, (y_train + 1) % 2, sample_weight=sw_train)
prob_pos_cal_clf_relabeled = cal_clf.predict_proba(this_X_test)[:, 1]
if method == "sigmoid":
assert_array_almost_equal(prob_pos_cal_clf,
1 - prob_pos_cal_clf_relabeled)
else:
# Isotonic calibration is not invariant against relabeling
# but should improve in both cases
assert (brier_score_loss(y_test, prob_pos_clf) >
brier_score_loss((y_test + 1) % 2,
prob_pos_cal_clf_relabeled))
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_bad_method(data, ensemble):
# Check only "isotonic" and "sigmoid" are accepted as methods
X, y = data
clf = LinearSVC()
clf_invalid_method = CalibratedClassifierCV(
clf, method="foo", ensemble=ensemble
)
with pytest.raises(ValueError):
clf_invalid_method.fit(X, y)
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_regressor(data, ensemble):
# `base-estimator` should provide either decision_function or
# predict_proba (most regressors, for instance, should fail)
X, y = data
clf_base_regressor = \
CalibratedClassifierCV(RandomForestRegressor(), ensemble=ensemble)
with pytest.raises(RuntimeError):
clf_base_regressor.fit(X, y)
def test_calibration_default_estimator(data):
# Check base_estimator default is LinearSVC
X, y = data
calib_clf = CalibratedClassifierCV(cv=2)
calib_clf.fit(X, y)
base_est = calib_clf.calibrated_classifiers_[0].base_estimator
assert isinstance(base_est, LinearSVC)
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_cv_splitter(data, ensemble):
# Check when `cv` is a CV splitter
X, y = data
splits = 5
kfold = KFold(n_splits=splits)
calib_clf = CalibratedClassifierCV(cv=kfold, ensemble=ensemble)
assert isinstance(calib_clf.cv, KFold)
assert calib_clf.cv.n_splits == splits
calib_clf.fit(X, y)
expected_n_clf = splits if ensemble else 1
assert len(calib_clf.calibrated_classifiers_) == expected_n_clf
@pytest.mark.parametrize('method', ['sigmoid', 'isotonic'])
@pytest.mark.parametrize('ensemble', [True, False])
def test_sample_weight(data, method, ensemble):
n_samples = 100
X, y = data
sample_weight = np.random.RandomState(seed=42).uniform(size=len(y))
X_train, y_train, sw_train = \
X[:n_samples], y[:n_samples], sample_weight[:n_samples]
X_test = X[n_samples:]
base_estimator = LinearSVC(random_state=42)
calibrated_clf = CalibratedClassifierCV(
base_estimator, method=method, ensemble=ensemble
)
calibrated_clf.fit(X_train, y_train, sample_weight=sw_train)
probs_with_sw = calibrated_clf.predict_proba(X_test)
# As the weights are used for the calibration, they should still yield
# different predictions
calibrated_clf.fit(X_train, y_train)
probs_without_sw = calibrated_clf.predict_proba(X_test)
diff = np.linalg.norm(probs_with_sw - probs_without_sw)
assert diff > 0.1
@pytest.mark.parametrize('method', ['sigmoid', 'isotonic'])
@pytest.mark.parametrize('ensemble', [True, False])
def test_parallel_execution(data, method, ensemble):
"""Test parallel calibration"""
X, y = data
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
base_estimator = LinearSVC(random_state=42)
cal_clf_parallel = CalibratedClassifierCV(
base_estimator, method=method, n_jobs=2, ensemble=ensemble
)
cal_clf_parallel.fit(X_train, y_train)
probs_parallel = cal_clf_parallel.predict_proba(X_test)
cal_clf_sequential = CalibratedClassifierCV(
base_estimator, method=method, n_jobs=1, ensemble=ensemble
)
cal_clf_sequential.fit(X_train, y_train)
probs_sequential = cal_clf_sequential.predict_proba(X_test)
assert_allclose(probs_parallel, probs_sequential)
@pytest.mark.parametrize('method', ['sigmoid', 'isotonic'])
@pytest.mark.parametrize('ensemble', [True, False])
# increase the number of RNG seeds to assess the statistical stability of this
# test:
@pytest.mark.parametrize('seed', range(2))
def test_calibration_multiclass(method, ensemble, seed):
def multiclass_brier(y_true, proba_pred, n_classes):
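# Multiclass Brier score: mean squared distance between one-hot targets and predicted probabilities.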
Y_onehot = np.eye(n_classes)[y_true]
return np.sum((Y_onehot - proba_pred) ** 2) / Y_onehot.shape[0]
# Test calibration for multiclass with classifier that implements
# only decision function.
clf = LinearSVC(random_state=7)
X, y = make_blobs(n_samples=500, n_features=100, random_state=seed,
centers=10, cluster_std=15.0)
# Use an unbalanced dataset by collapsing 8 clusters into one class
# to make the naive calibration based on a softmax more unlikely
# to work.
y[y > 2] = 2
n_classes = np.unique(y).shape[0]
X_train, y_train = X[::2], y[::2]
X_test, y_test = X[1::2], y[1::2]
clf.fit(X_train, y_train)
cal_clf = CalibratedClassifierCV(<|fim▁hole|> )
cal_clf.fit(X_train, y_train)
probas = cal_clf.predict_proba(X_test)
# Check probabilities sum to 1
assert_allclose(np.sum(probas, axis=1), np.ones(len(X_test)))
# Check that the dataset is not too trivial, otherwise it's hard
# to get interesting calibration data during the internal
# cross-validation loop.
assert 0.65 < clf.score(X_test, y_test) < 0.95
# Check that the accuracy of the calibrated model is never degraded
# too much compared to the original classifier.
assert cal_clf.score(X_test, y_test) > 0.95 * clf.score(X_test, y_test)
# Check that Brier loss of calibrated classifier is smaller than
# loss obtained by naively turning OvR decision function to
# probabilities via a softmax
uncalibrated_brier = \
multiclass_brier(y_test, softmax(clf.decision_function(X_test)),
n_classes=n_classes)
calibrated_brier = multiclass_brier(y_test, probas,
n_classes=n_classes)
assert calibrated_brier < 1.1 * uncalibrated_brier
# Test that calibration of a multiclass classifier decreases log-loss
# for RandomForestClassifier
clf = RandomForestClassifier(n_estimators=30, random_state=42)
clf.fit(X_train, y_train)
clf_probs = clf.predict_proba(X_test)
uncalibrated_brier = multiclass_brier(y_test, clf_probs,
n_classes=n_classes)
cal_clf = CalibratedClassifierCV(
clf, method=method, cv=5, ensemble=ensemble
)
cal_clf.fit(X_train, y_train)
cal_clf_probs = cal_clf.predict_proba(X_test)
calibrated_brier = multiclass_brier(y_test, cal_clf_probs,
n_classes=n_classes)
assert calibrated_brier < 1.1 * uncalibrated_brier
def test_calibration_zero_probability():
# Test an edge case where _CalibratedClassifier avoids numerical errors
# in the multiclass normalization step if all the calibrators output
# are zero all at once for a given sample and instead fallback to uniform
# probabilities.
class ZeroCalibrator():
# This function is called from _CalibratedClassifier.predict_proba.
def predict(self, X):
return np.zeros(X.shape[0])
X, y = make_blobs(n_samples=50, n_features=10, random_state=7,
centers=10, cluster_std=15.0)
clf = DummyClassifier().fit(X, y)
calibrator = ZeroCalibrator()
cal_clf = _CalibratedClassifier(
base_estimator=clf, calibrators=[calibrator], classes=clf.classes_)
probas = cal_clf.predict_proba(X)
# Check that all probabilities are uniformly 1. / clf.n_classes_
assert_allclose(probas, 1. / clf.n_classes_)
def test_calibration_prefit():
"""Test calibration for prefitted classifiers"""
n_samples = 50
X, y = make_classification(n_samples=3 * n_samples, n_features=6,
random_state=42)
sample_weight = np.random.RandomState(seed=42).uniform(size=y.size)
X -= X.min() # MultinomialNB only allows positive X
# split train and test
X_train, y_train, sw_train = \
X[:n_samples], y[:n_samples], sample_weight[:n_samples]
X_calib, y_calib, sw_calib = \
X[n_samples:2 * n_samples], y[n_samples:2 * n_samples], \
sample_weight[n_samples:2 * n_samples]
X_test, y_test = X[2 * n_samples:], y[2 * n_samples:]
# Naive-Bayes
clf = MultinomialNB()
# Check error if clf not prefit
unfit_clf = CalibratedClassifierCV(clf, cv="prefit")
with pytest.raises(NotFittedError):
unfit_clf.fit(X_calib, y_calib)
clf.fit(X_train, y_train, sw_train)
prob_pos_clf = clf.predict_proba(X_test)[:, 1]
# Naive Bayes with calibration
for this_X_calib, this_X_test in [(X_calib, X_test),
(sparse.csr_matrix(X_calib),
sparse.csr_matrix(X_test))]:
for method in ['isotonic', 'sigmoid']:
cal_clf = CalibratedClassifierCV(clf, method=method, cv="prefit")
for sw in [sw_calib, None]:
cal_clf.fit(this_X_calib, y_calib, sample_weight=sw)
y_prob = cal_clf.predict_proba(this_X_test)
y_pred = cal_clf.predict(this_X_test)
prob_pos_cal_clf = y_prob[:, 1]
assert_array_equal(y_pred,
np.array([0, 1])[np.argmax(y_prob, axis=1)])
assert (brier_score_loss(y_test, prob_pos_clf) >
brier_score_loss(y_test, prob_pos_cal_clf))
@pytest.mark.parametrize('method', ['sigmoid', 'isotonic'])
def test_calibration_ensemble_false(data, method):
# Test that `ensemble=False` is the same as using predictions from
# `cross_val_predict` to train calibrator.
X, y = data
clf = LinearSVC(random_state=7)
cal_clf = CalibratedClassifierCV(clf, method=method, cv=3, ensemble=False)
cal_clf.fit(X, y)
cal_probas = cal_clf.predict_proba(X)
# Get probas manually
unbiased_preds = cross_val_predict(
clf, X, y, cv=3, method='decision_function'
)
if method == 'isotonic':
calibrator = IsotonicRegression(out_of_bounds='clip')
else:
calibrator = _SigmoidCalibration()
calibrator.fit(unbiased_preds, y)
# Use `clf` fit on all data
clf.fit(X, y)
clf_df = clf.decision_function(X)
manual_probas = calibrator.predict(clf_df)
assert_allclose(cal_probas[:, 1], manual_probas)
def test_sigmoid_calibration():
"""Test calibration values with Platt sigmoid model"""
exF = np.array([5, -4, 1.0])
exY = np.array([1, -1, -1])
# computed from my python port of the C++ code in LibSVM
AB_lin_libsvm = np.array([-0.20261354391187855, 0.65236314980010512])
assert_array_almost_equal(AB_lin_libsvm,
_sigmoid_calibration(exF, exY), 3)
lin_prob = 1. / (1. + np.exp(AB_lin_libsvm[0] * exF + AB_lin_libsvm[1]))
sk_prob = _SigmoidCalibration().fit(exF, exY).predict(exF)
assert_array_almost_equal(lin_prob, sk_prob, 6)
# check that _SigmoidCalibration().fit only accepts 1d array or 2d column
# arrays
with pytest.raises(ValueError):
_SigmoidCalibration().fit(np.vstack((exF, exF)), exY)
def test_calibration_curve():
"""Check calibration_curve function"""
y_true = np.array([0, 0, 0, 1, 1, 1])
y_pred = np.array([0., 0.1, 0.2, 0.8, 0.9, 1.])
prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=2)
prob_true_unnormalized, prob_pred_unnormalized = \
calibration_curve(y_true, y_pred * 2, n_bins=2, normalize=True)
assert len(prob_true) == len(prob_pred)
assert len(prob_true) == 2
assert_almost_equal(prob_true, [0, 1])
assert_almost_equal(prob_pred, [0.1, 0.9])
assert_almost_equal(prob_true, prob_true_unnormalized)
assert_almost_equal(prob_pred, prob_pred_unnormalized)
# probabilities outside [0, 1] should not be accepted when normalize
# is set to False
with pytest.raises(ValueError):
calibration_curve([1.1], [-0.1], normalize=False)
# test that quantiles work as expected
y_true2 = np.array([0, 0, 0, 0, 1, 1])
y_pred2 = np.array([0., 0.1, 0.2, 0.5, 0.9, 1.])
prob_true_quantile, prob_pred_quantile = calibration_curve(
y_true2, y_pred2, n_bins=2, strategy='quantile')
assert len(prob_true_quantile) == len(prob_pred_quantile)
assert len(prob_true_quantile) == 2
assert_almost_equal(prob_true_quantile, [0, 2 / 3])
assert_almost_equal(prob_pred_quantile, [0.1, 0.8])
# Check that error is raised when invalid strategy is selected
with pytest.raises(ValueError):
calibration_curve(y_true2, y_pred2, strategy='percentile')
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_nan_imputer(ensemble):
"""Test that calibration can accept nan"""
X, y = make_classification(n_samples=10, n_features=2,
n_informative=2, n_redundant=0,
random_state=42)
X[0, 0] = np.nan
clf = Pipeline(
[('imputer', SimpleImputer()),
('rf', RandomForestClassifier(n_estimators=1))])
clf_c = CalibratedClassifierCV(
clf, cv=2, method='isotonic', ensemble=ensemble
)
clf_c.fit(X, y)
clf_c.predict(X)
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_prob_sum(ensemble):
# Test that sum of probabilities is 1. A non-regression test for
# issue #7796
num_classes = 2
X, y = make_classification(n_samples=10, n_features=5,
n_classes=num_classes)
clf = LinearSVC(C=1.0, random_state=7)
clf_prob = CalibratedClassifierCV(
clf, method="sigmoid", cv=LeaveOneOut(), ensemble=ensemble
)
clf_prob.fit(X, y)
probs = clf_prob.predict_proba(X)
assert_array_almost_equal(probs.sum(axis=1), np.ones(probs.shape[0]))
@pytest.mark.parametrize('ensemble', [True, False])
def test_calibration_less_classes(ensemble):
# Test to check calibration works fine when train set in a test-train
# split does not contain all classes
# Since this test uses LOO, at each iteration train set will not contain a
# class label
X = np.random.randn(10, 5)
y = np.arange(10)
clf = LinearSVC(C=1.0, random_state=7)
cal_clf = CalibratedClassifierCV(
clf, method="sigmoid", cv=LeaveOneOut(), ensemble=ensemble
)
cal_clf.fit(X, y)
for i, calibrated_classifier in \
enumerate(cal_clf.calibrated_classifiers_):
proba = calibrated_classifier.predict_proba(X)
if ensemble:
# Check that the unobserved class has proba=0
assert_array_equal(proba[:, i], np.zeros(len(y)))
# Check for all other classes proba>0
assert np.all(proba[:, :i] > 0)
assert np.all(proba[:, i + 1:] > 0)
else:
            # Check that the `proba` values are all 1/n_classes
assert np.allclose(proba, 1 / proba.shape[0])
@ignore_warnings(category=FutureWarning)
@pytest.mark.parametrize('X', [np.random.RandomState(42).randn(15, 5, 2),
np.random.RandomState(42).randn(15, 5, 2, 6)])
def test_calibration_accepts_ndarray(X):
"""Test that calibration accepts n-dimensional arrays as input"""
y = [1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0]
class MockTensorClassifier(BaseEstimator):
"""A toy estimator that accepts tensor inputs"""
def fit(self, X, y):
self.classes_ = np.unique(y)
return self
def decision_function(self, X):
# toy decision function that just needs to have the right shape:
return X.reshape(X.shape[0], -1).sum(axis=1)
calibrated_clf = CalibratedClassifierCV(MockTensorClassifier())
# we should be able to fit this classifier with no error
calibrated_clf.fit(X, y)
@pytest.fixture
def dict_data():
dict_data = [
{'state': 'NY', 'age': 'adult'},
{'state': 'TX', 'age': 'adult'},
{'state': 'VT', 'age': 'child'},
]
text_labels = [1, 0, 1]
return dict_data, text_labels
@pytest.fixture
def dict_data_pipeline(dict_data):
X, y = dict_data
pipeline_prefit = Pipeline([
('vectorizer', DictVectorizer()),
('clf', RandomForestClassifier())
])
return pipeline_prefit.fit(X, y)
def test_calibration_dict_pipeline(dict_data, dict_data_pipeline):
"""Test that calibration works in prefit pipeline with transformer
`X` is not array-like, sparse matrix or dataframe at the start.
See https://github.com/scikit-learn/scikit-learn/issues/8710
Also test it can predict without running into validation errors.
See https://github.com/scikit-learn/scikit-learn/issues/19637
"""
X, y = dict_data
clf = dict_data_pipeline
calib_clf = CalibratedClassifierCV(clf, cv='prefit')
calib_clf.fit(X, y)
# Check attributes are obtained from fitted estimator
assert_array_equal(calib_clf.classes_, clf.classes_)
    # Neither the pipeline nor the calibration meta-estimator
    # exposes the n_features_in_ check on this kind of data.
assert not hasattr(clf, 'n_features_in_')
assert not hasattr(calib_clf, 'n_features_in_')
# Ensure that no error is thrown with predict and predict_proba
calib_clf.predict(X)
calib_clf.predict_proba(X)
@pytest.mark.parametrize('clf, cv', [
pytest.param(LinearSVC(C=1), 2),
pytest.param(LinearSVC(C=1), 'prefit'),
])
def test_calibration_attributes(clf, cv):
    # Check that the `n_features_in_` and `classes_` attributes are created properly
X, y = make_classification(n_samples=10, n_features=5,
n_classes=2, random_state=7)
if cv == 'prefit':
clf = clf.fit(X, y)
calib_clf = CalibratedClassifierCV(clf, cv=cv)
calib_clf.fit(X, y)
if cv == 'prefit':
assert_array_equal(calib_clf.classes_, clf.classes_)
assert calib_clf.n_features_in_ == clf.n_features_in_
else:
classes = LabelEncoder().fit(y).classes_
assert_array_equal(calib_clf.classes_, classes)
assert calib_clf.n_features_in_ == X.shape[1]
def test_calibration_inconsistent_prefit_n_features_in():
    # Check that `n_features_in_` from the prefit base estimator
    # is consistent with the training set
X, y = make_classification(n_samples=10, n_features=5,
n_classes=2, random_state=7)
clf = LinearSVC(C=1).fit(X, y)
calib_clf = CalibratedClassifierCV(clf, cv='prefit')
msg = "X has 3 features, but LinearSVC is expecting 5 features as input."
with pytest.raises(ValueError, match=msg):
calib_clf.fit(X[:, :3], y)
# FIXME: remove in 1.1
def test_calibrated_classifier_cv_deprecation(data):
# Check that we raise the proper deprecation warning if accessing
# `calibrators_` from the `_CalibratedClassifier`.
X, y = data
calib_clf = CalibratedClassifierCV(cv=2).fit(X, y)
with pytest.warns(FutureWarning):
calibrators = calib_clf.calibrated_classifiers_[0].calibrators_
for clf1, clf2 in zip(
calibrators, calib_clf.calibrated_classifiers_[0].calibrators
):
assert clf1 is clf2<|fim▁end|>
|
clf, method=method, cv=5, ensemble=ensemble
|
<|file_name|>test_storage.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from unittest import TestCase
from scrapy.settings import Settings
from scrapy_tracker.storage.memory import MemoryStorage
from scrapy_tracker.storage.redis import RedisStorage
from scrapy_tracker.storage.sqlalchemy import SqlAlchemyStorage
from tests import TEST_KEY, TEST_CHECKSUM, mock
class TestMemoryStorage(TestCase):
def setUp(self):
self.storage = MemoryStorage(None)
def test_getset(self):
result = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertIsNone(result)
found = self.storage.getset(TEST_KEY, 'new_checksum')
self.assertEqual(TEST_CHECKSUM, found)
found = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertEqual('new_checksum', found)
result = self.storage.getset('new_key', TEST_CHECKSUM)
self.assertIsNone(result)
class TestSqlAlchemyStorage(TestCase):
def setUp(self):
self.storage = SqlAlchemyStorage(Settings({
'TRACKER_SQLALCHEMY_ENGINE': 'sqlite:///:memory:',
'TRACKER_SQLALCHEMY_FLUSH_DB': True
}))
def test_getset(self):
result = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertIsNone(result)
found = self.storage.getset(TEST_KEY, 'new_checksum')
self.assertEqual(TEST_CHECKSUM, found)
found = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertEqual('new_checksum', found)
result = self.storage.getset('new_key', TEST_CHECKSUM)
self.assertIsNone(result)
class TestRedisStorage(TestCase):
def setUp(self):<|fim▁hole|> data = {}
def getset(key, val):
old_val = data.get(key)
data[key] = val
return old_val
mock_getset = mock.MagicMock()
mock_getset.getset.side_effect = getset
mock_redis.return_value = mock_getset
self.storage = RedisStorage(Settings({
'TRACKER_RADIS_FLUSH_DB': True
}))
def test_getset(self):
result = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertIsNone(result)
found = self.storage.getset(TEST_KEY, 'new_checksum')
self.assertEqual(TEST_CHECKSUM, found)
found = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
self.assertEqual('new_checksum', found)
result = self.storage.getset('new_key', TEST_CHECKSUM)
self.assertIsNone(result)<|fim▁end|>
|
with mock.patch("scrapy_tracker.storage.redis.StrictRedis") as mock_redis:
|
<|file_name|>Roundrect_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Baliance. All rights reserved.
//
// DO NOT EDIT: generated by gooxml ECMA-376 generator
//
// Use of this source code is governed by the terms of the Affero GNU General
// Public License version 3.0 as published by the Free Software Foundation and
// appearing in the file LICENSE included in the packaging of this file. A
// commercial license can be purchased by contacting [email protected].
package vml_test
import (
"encoding/xml"
"testing"
<|fim▁hole|> v := vml.NewRoundrect()
if v == nil {
t.Errorf("vml.NewRoundrect must return a non-nil value")
}
if err := v.Validate(); err != nil {
t.Errorf("newly constructed vml.Roundrect should validate: %s", err)
}
}
func TestRoundrectMarshalUnmarshal(t *testing.T) {
v := vml.NewRoundrect()
buf, _ := xml.Marshal(v)
v2 := vml.NewRoundrect()
xml.Unmarshal(buf, v2)
}<|fim▁end|>
|
"baliance.com/gooxml/schema/urn/schemas_microsoft_com/vml"
)
func TestRoundrectConstructor(t *testing.T) {
|
<|file_name|>people_class.py<|end_file_name|><|fim▁begin|>#! /home/nsanthony/miniconda3/bin/python
import inventory.inventory_class as inv
import weapons.weapon_class as wp<|fim▁hole|>
class people:
"""This is the people class with attributes:"""
def name():
n = ''
return n
def health():
hp = 0
return hp
def descript():
d = 'Description of the person or creature'
return d
def equiped():
e = inv.inventory()
e.weapon = wp.weapon()
e.armor = 0
return e
def bag():
b = {}
return b
def hostile():
h = 0
return h<|fim▁end|>
| |
<|file_name|>test_wsse.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jurko Gospodnetić ( [email protected] )
"""
Implemented using the 'pytest' testing framework.
"""
if __name__ == "__main__":
import __init__
__init__.runUsingPyTest(globals())
from suds.wsse import UsernameToken
class TestUsernameToken:
username_token = None
def setup(self):
self.username_token = UsernameToken(
username=b"foouser",
password=b"barpasswd",
)
def test_setnonce_null(self):
self.setup()
self.username_token.setnonce()
        assert self.username_token.nonce is not None
def test_setnonce_text(self):
self.setup()
self.username_token.setnonce(b"affirm")
assert self.username_token.nonce == b"affirm"<|fim▁end|>
| |
<|file_name|>directshow.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time<|fim▁hole|>import cv2
from ikalog.utils import *
from ikalog.inputs.win.videoinput_wrapper import VideoInputWrapper
from ikalog.inputs import VideoInput
class DirectShow(VideoInput):
# override
def _enumerate_sources_func(self):
return self._videoinput_wrapper.get_device_list()
def read_raw(self):
if self._device_id is None:
return None
frame = self._videoinput_wrapper.get_pixels(
self._device_id,
parameters=(
self._videoinput_wrapper.VI_BGR +
self._videoinput_wrapper.VI_VERTICAL_FLIP
)
)
return frame
# override
def _read_frame_func(self):
frame = self.read_raw()
return frame
# override
def _initialize_driver_func(self):
pass
# override
def _cleanup_driver_func(self):
pass
# override
def _is_active_func(self):
return (self._device_id is not None)
# override
def _select_device_by_index_func(self, source, width=1280, height=720, framerate=59.94):
device_id = int(source)
vi = self._videoinput_wrapper
self.lock.acquire()
try:
if self._device_id is not None:
raise Exception('Need to deinit the device')
formats = [
{'width': width, 'height': height, 'framerate': None},
{'width': width, 'height': height, 'framerate': framerate},
]
for fmt in formats:
if fmt['framerate']:
vi.set_framerate(device_id, fmt['framerate'])
retval = vi.init_device(
device_id,
flags=self._videoinput_wrapper.DS_RESOLUTION,
width=fmt['width'],
height=fmt['height'],
)
if retval:
self._source_width = vi.get_frame_width(device_id)
self._source_height = vi.get_frame_height(device_id)
success = \
(width == self._source_width) and (
height == self._source_height)
if success or (not self.cap_optimal_input_resolution):
self._device_id = device_id
break
vi.deinit_device(device_id)
# end of for loop
if self._device_id is None:
IkaUtils.dprint(
'%s: Failed to init the capture device %d' %
(self, device_id)
)
finally:
self.lock.release()
# override
def _select_device_by_name_func(self, source):
IkaUtils.dprint('%s: Select device by name "%s"' % (self, source))
try:
index = self.enumerate_sources().index(source)
except ValueError:
IkaUtils.dprint('%s: Input "%s" not found' % (self, source))
return False
IkaUtils.dprint('%s: "%s" -> %d' % (self, source, index))
self._select_device_by_index_func(index)
def __init__(self):
self.strict_check = False
self._device_id = None
self._warned_resolution = False
self._videoinput_wrapper = VideoInputWrapper()
super(DirectShow, self).__init__()
if __name__ == "__main__":
obj = DirectShow()
list = obj.enumerate_sources()
for n in range(len(list)):
IkaUtils.dprint("%d: %s" % (n, list[n]))
dev = input("Please input number (or name) of capture device: ")
obj.select_source(dev)
k = 0
while k != 27:
frame = obj.read_frame()
if frame is not None:
cv2.imshow(obj.__class__.__name__, frame)
k = cv2.waitKey(1)
if k == ord('s'):
import time
cv2.imwrite('screenshot_%d.png' % int(time.time()), frame)<|fim▁end|>
|
import threading
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![warn(missing_docs, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications,
unused_results)]
//! # Pascal strings in Rust.
//!
//! A `PascalString`, or `ShortString`, is a String which stores its data on the stack. Because of this, it has
//! a fixed maximum size, which cannot be changed. Traditionally, the size of a `PascalString` is 256 bytes -
//! the first byte stores the length, which means that each remaining byte is indexable using only that byte.
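//!
//! A minimal sketch of that layout (illustrative only - not this crate's
//! actual types):
//!
//! ```ignore
//! // One length byte plus 255 data bytes: 256 bytes, all on the stack.
//! struct RawPascalString {
//!     len: u8,
//!     chars: [u8; 255],
//! }
//! ```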
//!
//! This is a very niche string type - generally, you are better off using `std::string::String`, or the
//! `AsciiString` type from the `ascii` crate if you need an ascii string. They have no upper size limit, and
//! are cheaper to pass around as they are only 64 bytes on the stack. Generally, you should only use `PascalString` if:
//!
//! * You know that you absolutely, certainly cannot do without heap allocation.
//! * You need to store your string data inline into your `struct` type - for example if you will allocate a bunch
//! of these custom `struct` types into a pool allocator, and cannot afford the heap fragmentation.
//! * You will keep, allocate, and deallocate a *lot* of short strings in your program.
<|fim▁hole|>
/// Ascii encoded pascal strings.
pub mod ascii;
/// Utf8 encoded pascal strings.
pub mod utf8;
const PASCAL_STRING_BUF_SIZE: usize = ::std::u8::MAX as usize;<|fim▁end|>
|
extern crate ascii as ascii_crate;
extern crate odds;
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import * as S from './addTodo.css';
import { ITodo } from '../../../../interfaces/ITodo';
export const AddTodoComponent: angular.IComponentOptions = {
template: `
<form ng-submit="$ctrl.addTodo()">
<input class="${S['new-todo']}" placeholder="What needs to be done?" ng-model="$ctrl.newTodo" autofocus>
<input class="${S['toggle-all']}" type="checkbox" ng-checked="$ctrl.isAllCompleted" ng-click="$ctrl.toggleAll($event)">
</form>
`,
bindings: {
list: '<',
onAdd: '&',
onToggleAll: '&'
},
controller: class implements angular.IController {
onAdd: { (todo: { todo: string; }): void; };
onToggleAll: { (completed: { completed: boolean; }): void; };
newTodo: string = '';
isAllCompleted: boolean;
constructor() { }
$onChanges(changes: { list: angular.IChangesObject<Array<ITodo>> }) {
this.isAllCompleted = changes.list.currentValue ? changes.list.currentValue.every(t => t.completed) : false;
}
addTodo() {
this.onAdd({ todo: this.newTodo });
this.newTodo = '';
}<|fim▁hole|> toggleAll(e: MouseEvent) {
this.onToggleAll({ completed: (<HTMLInputElement>e.target).checked });
}
}
};<|fim▁end|>
| |
<|file_name|>test_actionchain.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import action_chain_runner as acr
from st2actions.container.service import RunnerContainerService
from st2common.constants.action import LIVEACTION_STATUS_RUNNING
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED
from st2common.constants.action import LIVEACTION_STATUS_CANCELED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
from st2common.constants.action import LIVEACTION_STATUS_FAILED
from st2common.exceptions import actionrunner as runnerexceptions
from st2common.models.api.notification import NotificationsHelper
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.keyvalue import KeyValuePairDB
from st2common.models.system.common import ResourceReference
from st2common.persistence.keyvalue import KeyValuePair
from st2common.persistence.runner import RunnerType
from st2common.services import action as action_service
from st2common.util import action_db as action_db_util
from st2common.exceptions.action import ParameterRenderingFailedException
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
class DummyActionExecution(object):
def __init__(self, status=LIVEACTION_STATUS_SUCCEEDED, result=''):
self.id = None
self.status = status
self.result = result
FIXTURES_PACK = 'generic'
TEST_MODELS = {
'actions': ['a1.yaml', 'a2.yaml', 'action_4_action_context_param.yaml'],
'runners': ['testrunner1.yaml']
}
MODELS = FixturesLoader().load_models(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
ACTION_1 = MODELS['actions']['a1.yaml']
ACTION_2 = MODELS['actions']['a2.yaml']
ACTION_3 = MODELS['actions']['action_4_action_context_param.yaml']
RUNNER = MODELS['runners']['testrunner1.yaml']
CHAIN_1_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain1.yaml')
CHAIN_2_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain2.yaml')
CHAIN_ACTION_CALL_NO_PARAMS_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_call_no_params.yaml')
CHAIN_NO_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain.yaml')
CHAIN_NO_DEFAULT_2 = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain_2.yaml')
CHAIN_BAD_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'bad_default_chain.yaml')
CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_success_path_static_task_name.yaml')
CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_failure_path_static_task_name.yaml')
CHAIN_FIRST_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_first_task_parameter_render_fail.yaml')<|fim▁hole|>CHAIN_LIST_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_list_template.yaml')
CHAIN_DICT_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dict_template.yaml')
CHAIN_DEP_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dependent_input.yaml')
CHAIN_DEP_RESULTS_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dep_result_input.yaml')
MALFORMED_CHAIN_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'malformedchain.yaml')
CHAIN_TYPED_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_params.yaml')
CHAIN_SYSTEM_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_system_params.yaml')
CHAIN_WITH_ACTIONPARAM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_actionparam_vars.yaml')
CHAIN_WITH_SYSTEM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_system_vars.yaml')
CHAIN_WITH_PUBLISH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_publish.yaml')
CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_publish_params_rendering_failure.yaml')
CHAIN_WITH_INVALID_ACTION = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_invalid_action.yaml')
CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_and_parameters.yaml')
CHAIN_ACTION_PARAMS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_attribute.yaml')
CHAIN_ACTION_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_parameters_attribute.yaml')
CHAIN_ACTION_INVALID_PARAMETER_TYPE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_invalid_parameter_type_passed_to_action.yaml')
CHAIN_NOTIFY_API = {'notify': {'on-complete': {'message': 'foo happened.'}}}
CHAIN_NOTIFY_DB = NotificationsHelper.to_model(CHAIN_NOTIFY_API)
@mock.patch.object(action_db_util, 'get_runnertype_by_name',
mock.MagicMock(return_value=RUNNER))
class TestActionChainRunner(DbTestCase):
def test_runner_creation(self):
runner = acr.get_runner()
self.assertTrue(runner)
self.assertTrue(runner.runner_id)
def test_malformed_chain(self):
try:
chain_runner = acr.get_runner()
chain_runner.entry_point = MALFORMED_CHAIN_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
self.assertTrue(False, 'Expected pre_run to fail.')
except runnerexceptions.ActionRunnerPreRunError:
self.assertTrue(True)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_chain_second_task_times_out(self, request):
# Second task in the chain times out so the action chain status should be timeout
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_2_PATH
chain_runner.action = ACTION_1
original_run_action = chain_runner._run_action
def mock_run_action(*args, **kwargs):
original_live_action = args[0]
liveaction = original_run_action(*args, **kwargs)
if original_live_action.action == 'wolfpack.a2':
# Mock a timeout for second task
liveaction.status = LIVEACTION_STATUS_TIMED_OUT
return liveaction
chain_runner._run_action = mock_run_action
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_TIMED_OUT)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_task_is_canceled_while_running(self, request):
# Second task in the action is CANCELED, make sure runner doesn't get stuck in an infinite
# loop
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_2_PATH
chain_runner.action = ACTION_1
original_run_action = chain_runner._run_action
def mock_run_action(*args, **kwargs):
original_live_action = args[0]
if original_live_action.action == 'wolfpack.a2':
status = LIVEACTION_STATUS_CANCELED
else:
status = LIVEACTION_STATUS_SUCCEEDED
request.return_value = (DummyActionExecution(status=status), None)
liveaction = original_run_action(*args, **kwargs)
return liveaction
chain_runner._run_action = mock_run_action
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_CANCELED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# Chain count should be 2 since the last task doesn't get called since the second one was
# canceled
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_task_action_call_with_no_params(self, request):
# Make sure that the runner doesn't explode if task definition contains
# no "params" section
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_CALL_NO_PARAMS_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default_multiple_options(self, request):
        # The subtle difference is that when there are multiple possible default
        # nodes, the order per chain definition may not be preserved. This is
        # really a poorly formatted chain, but we still make the best attempt
        # to run it.
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT_2
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 2.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_bad_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BAD_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = 'Unable to find node with name "bad_default" referenced in "default".'
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch('eventlet.sleep', mock.MagicMock())
@mock.patch.object(action_db_util, 'get_liveaction_by_id', mock.MagicMock(
return_value=DummyActionExecution()))
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_RUNNING), None))
def test_chain_runner_success_path_with_wait(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_FAILED), None))
def test_chain_runner_failure_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_success_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c5" referenced in "on-success" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_failure_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c6" referenced in "on-failure" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', side_effect=RuntimeError('Test Failure.'))
def test_chain_runner_action_exception(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, results, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
error_count = 0
for task_result in results['tasks']:
if task_result['result'].get('error', None):
error_count += 1
self.assertEqual(error_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_str_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "1"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_list_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_LIST_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "[2, 3, 4]"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_dict_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DICT_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {"p1": {"p1.3": "[3, 4]", "p1.2": "2", "p1.1": "1"}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'p2': u'1', u'p3': u'1', u'p1': u'1'}]
        # Each of the call_args must be one of the expected values.
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_results_param(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_RESULTS_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'out': u"{'c2': {'o1': '1'}, 'c1': {'o1': '1'}}"}]
        # Each of the call_args must be one of the expected values.
self.assertEqual(request.call_count, 3)
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(RunnerType, 'get_by_name',
mock.MagicMock(return_value=RUNNER))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_missing_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertEqual(request.call_count, 0, 'No call expected.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_single_task(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# No tasks ran because rendering of parameters for the first task failed
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(result['tasks'], [])
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c1". Parameter rendering failed' in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_multiple_tasks(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SECOND_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# Verify that only first task has ran
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(len(result['tasks']), 1)
self.assertEqual(result['tasks'][0]['name'], 'c1')
expected_error = ('Failed rendering value for action parameter "p1" in '
'task "c2" (template string={{s1}}):')
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c2". Parameter rendering failed' in result['error'])
self.assertTrue(expected_error in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_params(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_TYPED_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 'two', 's3': 3.14})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'booltype': True,
'inttype': 1,
'numbertype': 3.14,
'strtype': 'two',
'arrtype': ['1', 'two'],
'objtype': {'s2': 'two',
'k1': '1'}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_system_params(self, request):
kvps = []
try:
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='1')))
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a.b.c', value='two')))
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SYSTEM_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two'}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
finally:
for kvp in kvps:
KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_system_params(self, request):
kvps = []
try:
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='two')))
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_SYSTEM_VARS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two',
'strtype_legacy': 'two',
'booltype': True}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
finally:
for kvp in kvps:
KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_action_params(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_ACTIONPARAM_VARS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'input_a': 'two'})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two',
'booltype': True}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_chain_runner_publish(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_PUBLISH
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.runner_parameters = {'display_published': True}
chain_runner.pre_run()
action_parameters = {'action_param_1': 'test value 1'}
_, result, _ = chain_runner.run(action_parameters=action_parameters)
# We also assert that the action parameters are available in the
# "publish" scope
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'published',
'booltype': True,
'published_action_param': action_parameters['action_param_1']}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
# Assert that the variables are correctly published
self.assertEqual(result['published'],
{'published_action_param': u'test value 1', 'o1': u'published'})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_publish_param_rendering_failure(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
try:
chain_runner.run({})
except ParameterRenderingFailedException as e:
# TODO: Should we treat this as task error? Right now it bubbles all
# the way up and it's not really consistent with action param
# rendering failure
expected_error = ('Failed rendering value for publish parameter "p1" in '
'task "c2" (template string={{ not_defined }}):')
self.assertTrue(expected_error in str(e))
pass
else:
self.fail('Exception was not thrown')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_task_passes_invalid_parameter_type_to_action(self, mock_request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_INVALID_PARAMETER_TYPE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
action_parameters = {}
        expected_msg = (r'Failed to cast value "stringnotanarray" \(type: str\) for parameter '
                        r'"arrtype" of type "array"')
self.assertRaisesRegexp(ValueError, expected_msg, chain_runner.run,
action_parameters=action_parameters)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=None))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_action_chain_runner_referenced_action_doesnt_exist(self, mock_request):
# Action referenced by a task doesn't exist, should result in a top level error
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_INVALID_ACTION
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
expected_error = ('Failed to run task "c1". Action with reference "wolfpack.a2" '
'doesn\'t exist.')
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertTrue(expected_error in output['error'])
self.assertTrue('Traceback' in output['traceback'], output['traceback'])
def test_exception_is_thrown_if_both_params_and_parameters_attributes_are_provided(self):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Either "params" or "parameters" attribute needs to be provided, but '
'not both')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError, expected_msg,
chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_params_and_parameters_attributes_both_work(self, _):
# "params" attribute used
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
original_build_liveaction_object = chain_runner._build_liveaction_object
def mock_build_liveaction_object(action_node, resolved_params, parent_context):
# Verify parameters are correctly passed to the action
self.assertEqual(resolved_params, {'pparams': 'v1'})
original_build_liveaction_object(action_node=action_node,
resolved_params=resolved_params,
parent_context=parent_context)
chain_runner._build_liveaction_object = mock_build_liveaction_object
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
# "parameters" attribute used
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMETERS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
def mock_build_liveaction_object(action_node, resolved_params, parent_context):
# Verify parameters are correctly passed to the action
self.assertEqual(resolved_params, {'pparameters': 'v1'})
original_build_liveaction_object(action_node=action_node,
resolved_params=resolved_params,
parent_context=parent_context)
chain_runner._build_liveaction_object = mock_build_liveaction_object
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
@classmethod
def tearDownClass(cls):
FixturesLoader().delete_models_from_db(MODELS)<|fim▁end|>
|
CHAIN_SECOND_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_second_task_parameter_render_fail.yaml')
|
<|file_name|>bsp.cpp<|end_file_name|><|fim▁begin|>// bsp.cpp -- by Thatcher Ulrich <[email protected]> 2001
// This code is in the public domain.
// Code to make a collision-bsp tree out of triangle soup.
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <engine/utility.h>
#include <engine/bsp.h>
const bool print_debug = 0;
/*
sort faces by size? Or order them randomly?;
make root bsp node w/ the first face;
for rest of faces {
root->add_face( a, b, c, plane( normal, d ) );
}
*/
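//
// A minimal sketch of that build loop (illustrative only -- "faces",
// "face_plane" and "slop" are assumed helpers/values, not part of this file):
//
//	bsp_node*	root = new bsp_node( face_plane( faces[0] ) );
//	for ( int i = 0; i < face_count; i++ ) {
//		root->add_face( faces[i].a, faces[i].b, faces[i].c,
//				face_plane( faces[i] ), i, slop );
//	}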
enum plane_class {
INSIDE = -1,
ON = 0,
OUTSIDE = 1
};
////const float BSP_SLOP = 1.1923435f;
//const float BSP_SLOP = 0.1f;
plane_class classify_point( const plane_info& p, vec3 a, float slop )
// Classify the given point with respect to the given plane; use a tolerance
// of +/- slop to determine when a point is ON the plane.
{
float distance = p.normal * a - p.d;
if ( distance < -slop ) {
return INSIDE;
} else if ( distance > slop ) {
if ( print_debug ) {
printf( "d = %f, pn = %f %f %f, pd = %f, a = %f %f %f, p*a = %f\n",
distance, p.normal.get_x(), p.normal.get_y(), p.normal.get_z(), p.d,
a.get_x(), a.get_y(), a.get_z(),
p.normal * a
); //xxxx
}
return OUTSIDE;
} else {
return ON;
}
}
vec3 intersect( const plane_info& p, const vec3& a, const vec3& b )
// Returns the point of intersection between the plane p and the
// line segment defined by a and b.
//
// NB: does not check to ensure that the segment a-b actually intersects p!
{
float da = p.normal * a - p.d;
float db = p.normal * b - p.d;
<|fim▁hole|>
if ( fabs( diff ) < 0.000001f ) {
// Segment is parallel to plane. Just pick the midpoint of
// the segment as the intersection point.
return ( a + b ) * 0.5f;
} else {
// Compute how far along the segment the intersection is.
float f = ( 0 - da ) / diff;
// printf( "f = %f\n", f );//xxxxx
return a + ( b - a ) * f;
}
}
bsp_node::bsp_node( const plane_info& p )
// Constructor. Make a root node using the given plane info.
{
m_plane = p;
m_inside = m_outside = 0;
m_partitioning_plane = true;
m_face_list = 0;
m_face_count = 0;
}
bsp_node::~bsp_node()
// Destructor. Delete our child trees recursively.
{
if ( m_inside ) {
delete m_inside;
}
if ( m_outside ) {
delete m_outside;
}
if ( m_face_list ) {
free( m_face_list ); // using malloc/free because of realloc...
}
}
void bsp_node::add_partition( const plane_info& p )
// Add a partitioning plane to this bsp tree. Basically propagates
// the plane and adds it to all leaf nodes.
//
// NB: VERY IMPORTANT: You must add partitioning planes before adding
// faces. Otherwise volume queries will be wrong.
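//
// Sketch of the required ordering (illustrative only):
//
//	root->add_partition( split_plane );	// every partitioning plane first...
//	root->add_face( a, b, c, p, 0, slop );	// ...then the triangle geometry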
{
if ( m_partitioning_plane != true ) {
assert(0);
// Trying to add a partitioning plane to a tree that already
// contains faces. Refuse to add the plane.
return;
}
if ( m_inside ) {
m_inside->add_partition( p );
} else {
m_inside = new bsp_node( p );
}
if ( m_outside ) {
m_outside->add_partition( p );
} else {
m_outside = new bsp_node( p );
}
}
void bsp_node::add_face( const vec3& a, const vec3& b, const vec3& c, const plane_info& p, int face_index, float slop )
// Inserts the given triangle into this node's bsp tree. face_index
// is a reference to the original face, which is passed back during
// ray_cast() if the caller gives a pointer to a face_test_callback
// function.
//
// The slop parameter determines how far a vertex may be from a
// splitting plane and still be considered "on" the plane.
//
// @@ slop could possibly be replaced by using a fraction of the
// longest face edge length.
{
// Classify the three verts of the triangle w/r/t this node's plane.
plane_class ca = classify_point( m_plane, a, slop );
plane_class cb = classify_point( m_plane, b, slop );
plane_class cc = classify_point( m_plane, c, slop );
if ( print_debug ) {
//x printf( "ca = %d, cb = %d, cc = %d\n", ca, cb, cc );//xxx
}
if ( ca == ON && cb == ON && cc == ON ) {
// All points are in this node's plane.
if ( p.normal * m_plane.normal < 0 ) {
// Face's plane and our node plane are opposite each other.
// Add the face on the outside of this plane.
add_outside_face( a, b, c, p, face_index, slop );
} else {
// This face fits in this node. Add the face index to our list.
if ( print_debug ) printf( "o\n" );//xxxxx
insert_into_face_list( face_index );
}
return;
} else if ( ( ca && cb && ( ca != cb ) )
|| ( cb && cc && ( cb != cc ) )
|| ( cc && ca && ( cc != ca ) ) )
{
// printf( "*\n" );
// return; //xxxxxxxx
// This triangle straddles the plane.
// Make references to the verts, so we can sort them.
const vec3* pa = &a;
const vec3* pb = &b;
const vec3* pc = &c;
// Sort the vert references so that *pa is the most inside, and *pc is the most outside.
const vec3* t;
plane_class ct;
if ( ca > cb ) {
t = pa; pa = pb; pb = t; // swap( &a, &b );
ct = ca; ca = cb; cb = ct;
}
if ( cb > cc ) {
t = pb; pb = pc; pc = t; // swap( &b, &c );
ct = cb; cb = cc; cc = ct;
}
if ( ca > cb ) {
t = pa; pa = pb; pb = t; // swap( &a, &b );
ct = ca; ca = cb; cb = ct;
}
if ( cb == INSIDE ) {
if ( print_debug ) printf( "^" );//xxxx
// a and b are INSIDE the plane, c is OUTSIDE.
vec3 d = intersect( m_plane, *pa, *pc );
vec3 e = intersect( m_plane, *pb, *pc );
add_inside_face( *pa, *pb, d, p, face_index, slop );
add_inside_face( *pb, d, e, p, face_index, slop );
add_outside_face( d, e, *pc, p, face_index, slop );
} else if ( cb == ON ) {
if ( print_debug ) printf( "<" );//xxxx
// a is INSIDE, b is ON, c is OUTSIDE.
vec3 d = intersect( m_plane, *pa, *pc );
add_inside_face( *pa, *pb, d, p, face_index, slop );
add_outside_face( *pb, d, *pc, p, face_index, slop );
} else {
if ( print_debug ) printf( "V: " );//xxxx
// a is INSIDE, b and c are OUTSIDE.
vec3 d = intersect( m_plane, *pa, *pb );
vec3 e = intersect( m_plane, *pa, *pc );
add_inside_face( *pa, d, e, p, face_index, slop );
add_outside_face( d, e, *pb, p, face_index, slop );
add_outside_face( e, *pb, *pc, p, face_index, slop );
}
} else {
// This triangle is fully on one side of the plane or the other.
if ( ca == INSIDE || cb == INSIDE || cc == INSIDE ) {
add_inside_face( a, b, c, p, face_index, slop );
} else {
add_outside_face( a, b, c, p, face_index, slop );
}
}
}
void bsp_node::add_inside_face( const vec3& a, const vec3& b, const vec3& c, const plane_info& p, int face_index, float slop )
// Adds the given triangle with the specified plane info to the inside
// half of this node. Creates a new inside node if necessary.
{
if ( print_debug ) printf( "/" );//xxxx
if ( m_inside ) {
m_inside->add_face( a, b, c, p, face_index, slop );
} else {
if ( print_debug ) printf( "x\n" );//xxxxx
m_inside = new bsp_node( p );
m_inside->insert_into_face_list( face_index );
}
}
void bsp_node::add_outside_face( const vec3& a, const vec3& b, const vec3& c, const plane_info& p, int face_index, float slop )
// Adds the given triangle with the specified plane info to the outside
// half-space of this node. Creates a new outside node if necessary.
{
if ( print_debug ) printf( "\\" );//xxxx
if ( m_outside ) {
m_outside->add_face( a, b, c, p, face_index, slop );
} else {
if ( print_debug ) printf( "y\n" );//xxxxx
m_outside = new bsp_node( p );
m_outside->insert_into_face_list( face_index );
}
}
void bsp_node::insert_into_face_list( int face_index )
// Adds the given face index into our array of face lists.
{
m_face_count++;
// Make storage for the new face index.
if ( m_face_list ) {
m_face_list = (int*) realloc( m_face_list, m_face_count * sizeof( face_index ) );
} else {
m_face_list = (int*) malloc( m_face_count * sizeof( face_index ) );
}
m_face_list[ m_face_count - 1 ] = face_index;
// Mark this node as having geometry.
m_partitioning_plane = false;
}
bool bsp_node::ray_cast( collision_info* result, const vec3& p0, const vec3& dir, float distance, bool (*face_test_callback)( const vec3& normal, const vec3& pt, int face_index ) )
// Cast the specified ray through this BSP tree. Finds the nearest
// hit, if any. If a hit is found, the results are put in *result and
// true is returned; otherwise false is returned and *result is left alone.
//
// If face_test_callback is not null, then use it to verify hits.
// This function will call face_test_callback() with candidate
// intersection points and a face_index. The candidate point is
// guaranteed to be in the plane of the face, so the callback just
// needs to determine if the point is within the triangle, and return
// true if it is.
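//
// Illustrative sketch (not part of the original source): a callback of the
// required shape might do a point-in-triangle test against an
// application-owned face table.  s_faces, tri and point_in_triangle below
// are hypothetical names:
//
//     static bool my_face_test( const vec3& normal, const vec3& pt, int face_index )
//     {
//         const tri& t = s_faces[ face_index ];
//         return point_in_triangle( pt, t.v0, t.v1, t.v2 );
//     }
//
// usage: root->ray_cast( &info, start, dir, max_distance, my_face_test );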
{
float dot = dir * m_plane.normal;
float d0 = p0 * m_plane.normal - m_plane.d;
float delta_n = dot * distance;
float d1 = d0 + delta_n;
bool parallel = fabs( dot ) < 0.000001f;
// printf( "%f %f %f %f\n", d0, d1, dot, distance ); //xxxxx
if ( parallel == false ) {
// The ray might cross our plane.
float hit_distance = -d0 / dot;
// printf( "hd = %f\n", hit_distance );//xxxxx
if ( d0 > 0 && d1 <= 0 ) {
//
// Ray crosses from the outside to the inside of this half-space.
//
// printf( "+" ); //xxxxxx
// Check the first half of the ray against the outside.
if ( m_outside
&& m_outside->ray_cast( result, p0, dir, hit_distance, face_test_callback ) )
{
return true;
}
vec3 hit_point = p0 + dir * hit_distance;
if ( m_partitioning_plane == false ) {
// If the crossing point is on the inside of our inside
// tree, then we have a potential hit.
// if ( m_inside == 0
// || m_inside->test_point( hit_point ) )
// {
// Check the faces for a hit.
int i;
for ( i = 0; i < m_face_count; i++ ) {
if ( face_test_callback == 0
|| face_test_callback( m_plane.normal, hit_point, m_face_list[ i ] ) )
{
result->point = hit_point;
result->normal = m_plane.normal;
// get face properties from callback?
return true;
}
}
// }
}
// No hits so far... check the inside portion of the ray.
return m_inside &&
m_inside->ray_cast( result, hit_point, dir, distance - hit_distance, face_test_callback );
}
if ( d0 <= 0 && d1 > 0 ) {
//
// Ray crosses from the inside to the outside of this half-space.
//
// Check the first half of the ray against the inside.
if ( m_inside
&& m_inside->ray_cast( result, p0, dir, hit_distance, face_test_callback ) )
{
return true;
}
vec3 hit_point = p0 + dir * hit_distance;
// If no hit, check the second half against the outside.
return m_outside
&& m_outside->ray_cast( result, hit_point, dir, distance - hit_distance, face_test_callback );
}
}
//
// Ray does not cross our plane. Check which side of the plane
// the ray is on, and only check that side.
//
// printf( "O\n" ); //xxxx
if ( d0 <= 0 ) {
// printf( "/" );//xxx
return m_inside
&& m_inside->ray_cast( result, p0, dir, distance, face_test_callback );
} else {
// printf( "\\" );//xxx
return m_outside
&& m_outside->ray_cast( result, p0, dir, distance, face_test_callback );
}
}
bool bsp_node::test_point( const vec3& a )
// Return true if the given point is inside our volume; false otherwise.
{
float d = m_plane.normal * a - m_plane.d;
if ( d <= 0 ) {
// Point is inside this node.
if ( m_inside == 0 && m_partitioning_plane == true ) {
// Point is in an empty partitioning volume. Assume we're
// outside any volume.
//
// @@ This is not strictly correct, because we could be
// inside a volume defined by faces which entirely enclose
// the partitioning volume, without intersecting it. Such
// volumes will have hollow cells inside them defined by
// the partitioning planes. However, this shouldn't be a
// practical problem for ray tracing, because valid test
// points will be on a real volume plane.
//
// There is one potential failure case: if a ray hits a
// big volume very near a point where the corner or edge
// of an internal partitioning volume touches the big
// volume, it's conceivable that the raycast will miss due
// to the test_point() call returning false for that
// point. Hm. One solution would be to mark leaf
// partitioning planes that are inside such volumes with
// an "inside" flag; in which case we'd return "true"
// here, and everything would be hunky-dory.
return false;
}
return m_inside == 0
|| m_inside->test_point( a );
} else {
// Point is on the outside of this node...
return m_outside
&& m_outside->test_point( a );
}
}<|fim▁end|>
|
float diff = db - da;
|
<|file_name|>process.py<|end_file_name|><|fim▁begin|>from tornado.process import cpu_count, _reseed_random
from tornado import ioloop
import logging
import os
import signal
import sys
from tornado.util import errno_from_exception
import errno
logger = logging.getLogger(__name__)
_task_id = None
exiting = False
def fork_processes(num_processes, max_restarts=100):
"""Starts multiple worker processes.
If ``num_processes`` is None or <= 0, we detect the number of cores
available on this machine and fork that number of child
processes. If ``num_processes`` is given and > 0, we fork that
specific number of sub-processes.
Since we use processes and not threads, there is no shared memory
between any server code.
Note that multiple processes are not compatible with the autoreload
module (or the ``autoreload=True`` option to `tornado.web.Application`
which defaults to True when ``debug=True``).
When using multiple processes, no IOLoops can be created or
referenced until after the call to ``fork_processes``.
In each child process, ``fork_processes`` returns its *task id*, a
    number between 0 and ``num_processes - 1``. Processes that exit
abnormally (due to a signal or non-zero exit status) are restarted
with the same id (up to ``max_restarts`` times). In the parent
process, ``fork_processes`` returns None if all child processes
have exited normally, but will otherwise only exit by throwing an
exception.
"""
global _task_id
assert _task_id is None
if num_processes is None or num_processes <= 0:
num_processes = cpu_count()
logger.info("Starting %d processes", num_processes)
<|fim▁hole|> def start_child(i):
pid = os.fork()
if pid == 0:
# child process
_reseed_random()
global _task_id
_task_id = i
return i
else:
children[pid] = i
return None
for i in range(num_processes):
id = start_child(i)
if id is not None:
return id
global exiting
exiting = False
def receive_signal(sig, frame):
logger.debug('Received signal')
global exiting
exiting = True
for pid, taskid in children.items():
os.kill(pid, signal.SIGTERM)
signal.signal(signal.SIGTERM, receive_signal)
signal.signal(signal.SIGINT, receive_signal)
num_restarts = 0
while children and not exiting:
logger.debug('Exiting : %s' % exiting)
try:
pid, status = os.wait()
except OSError as e:
if errno_from_exception(e) == errno.EINTR:
continue
raise
if pid not in children:
continue
id = children.pop(pid)
if os.WIFSIGNALED(status):
logger.warning("child %d (pid %d) killed by signal %d, restarting",
id, pid, os.WTERMSIG(status))
elif os.WEXITSTATUS(status) != 0:
logger.warning("child %d (pid %d) exited with status %d, restarting",
id, pid, os.WEXITSTATUS(status))
else:
logger.info("child %d (pid %d) exited normally", id, pid)
continue
num_restarts += 1
if num_restarts > max_restarts:
raise RuntimeError("Too many child restarts, giving up")
new_id = start_child(id)
if new_id is not None:
return new_id
# All child processes exited cleanly, so exit the master process
# instead of just returning to right after the call to
# fork_processes (which will probably just start up another IOLoop
# unless the caller checks the return value).
sys.exit(0)<|fim▁end|>
|
children = {}
|
<|file_name|>deserializeform.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.6.3
/*!
@author Branko Vukelic <[email protected]>
@license MIT
*/
var _this = this;
if (typeof define !== 'function' || !define.amd) {
this.require = function(dep) {
return (function() {
switch (dep) {
case 'jquery':
return _this.jQuery;
default:
return null;
}
})() || (function() {
throw new Error("Unmet dependency " + dep);
})();
};
this.define = function(factory) {
return _this.ribcage.utils.deserializeForm = factory(_this.require);
};
}
define(function(require) {
var $;
$ = require('jquery');
$.deserializeForm = function(form, data) {
form = $(form);
form.find(':input').each(function() {
var currentValue, input, name, type;
input = $(this);
name = input.attr('name');
type = input.attr('type');
currentValue = input.val();<|fim▁hole|> case 'checkbox':
return input.prop('checked', data[name] === 'on');
case 'radio':
return input.prop('checked', data[name] === currentValue);
default:
return input.val(data[name]);
}
});
return form;
};
$.fn.deserializeForm = function(data) {
return $.deserializeForm(this, data);
};
return $.deserializeForm;
});<|fim▁end|>
|
if (!name) {
return;
}
switch (type) {
|
<|file_name|>ip_lib.py<|end_file_name|><|fim▁begin|><|fim▁hole|># All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import netaddr
from oslo.config import cfg
from neutron.agent.linux import utils
from neutron.common import exceptions
OPTS = [
cfg.BoolOpt('ip_lib_force_root',
default=False,
help=_('Force ip_lib calls to use the root helper')),
]
LOOPBACK_DEVNAME = 'lo'
# NOTE(ethuleau): depending on the version of iproute2, the VLAN
# interface details vary.
VLAN_INTERFACE_DETAIL = ['vlan protocol 802.1q',
'vlan protocol 802.1Q',
'vlan id']
class SubProcessBase(object):
def __init__(self, root_helper=None, namespace=None,
log_fail_as_error=True):
self.root_helper = root_helper
self.namespace = namespace
self.log_fail_as_error = log_fail_as_error
try:
self.force_root = cfg.CONF.ip_lib_force_root
except cfg.NoSuchOptError:
# Only callers that need to force use of the root helper
# need to register the option.
self.force_root = False
def _run(self, options, command, args):
if self.namespace:
return self._as_root(options, command, args)
elif self.force_root:
# Force use of the root helper to ensure that commands
# will execute in dom0 when running under XenServer/XCP.
return self._execute(options, command, args, self.root_helper,
log_fail_as_error=self.log_fail_as_error)
else:
return self._execute(options, command, args,
log_fail_as_error=self.log_fail_as_error)
def enforce_root_helper(self):
if not self.root_helper and os.geteuid() != 0:
raise exceptions.SudoRequired()
def _as_root(self, options, command, args, use_root_namespace=False):
self.enforce_root_helper()
namespace = self.namespace if not use_root_namespace else None
return self._execute(options,
command,
args,
self.root_helper,
namespace,
log_fail_as_error=self.log_fail_as_error)
@classmethod
def _execute(cls, options, command, args, root_helper=None,
namespace=None, log_fail_as_error=True):
opt_list = ['-%s' % o for o in options]
if namespace:
ip_cmd = ['ip', 'netns', 'exec', namespace, 'ip']
else:
ip_cmd = ['ip']
return utils.execute(ip_cmd + opt_list + [command] + list(args),
root_helper=root_helper,
log_fail_as_error=log_fail_as_error)
def set_log_fail_as_error(self, fail_with_error):
self.log_fail_as_error = fail_with_error
class IPWrapper(SubProcessBase):
def __init__(self, root_helper=None, namespace=None):
super(IPWrapper, self).__init__(root_helper=root_helper,
namespace=namespace)
self.netns = IpNetnsCommand(self)
def device(self, name):
return IPDevice(name, self.root_helper, self.namespace)
def get_devices(self, exclude_loopback=False):
retval = []
output = self._execute(['o', 'd'], 'link', ('list',),
self.root_helper, self.namespace)
for line in output.split('\n'):
if '<' not in line:
continue
tokens = line.split(' ', 2)
if len(tokens) == 3:
if any(v in tokens[2] for v in VLAN_INTERFACE_DETAIL):
delimiter = '@'
else:
delimiter = ':'
name = tokens[1].rpartition(delimiter)[0].strip()
if exclude_loopback and name == LOOPBACK_DEVNAME:
continue
retval.append(IPDevice(name,
self.root_helper,
self.namespace))
return retval
def add_tuntap(self, name, mode='tap'):
self._as_root('', 'tuntap', ('add', name, 'mode', mode))
return IPDevice(name, self.root_helper, self.namespace)
def add_veth(self, name1, name2, namespace2=None):
args = ['add', name1, 'type', 'veth', 'peer', 'name', name2]
if namespace2 is None:
namespace2 = self.namespace
else:
self.ensure_namespace(namespace2)
args += ['netns', namespace2]
self._as_root('', 'link', tuple(args))
return (IPDevice(name1, self.root_helper, self.namespace),
IPDevice(name2, self.root_helper, namespace2))
def del_veth(self, name):
"""Delete a virtual interface between two namespaces."""
self._as_root('', 'link', ('del', name))
def ensure_namespace(self, name):
if not self.netns.exists(name):
ip = self.netns.add(name)
lo = ip.device(LOOPBACK_DEVNAME)
lo.link.set_up()
else:
ip = IPWrapper(self.root_helper, name)
return ip
def namespace_is_empty(self):
return not self.get_devices(exclude_loopback=True)
def garbage_collect_namespace(self):
"""Conditionally destroy the namespace if it is empty."""
if self.namespace and self.netns.exists(self.namespace):
if self.namespace_is_empty():
self.netns.delete(self.namespace)
return True
return False
def add_device_to_namespace(self, device):
if self.namespace:
device.link.set_netns(self.namespace)
def add_vxlan(self, name, vni, group=None, dev=None, ttl=None, tos=None,
local=None, port=None, proxy=False):
cmd = ['add', name, 'type', 'vxlan', 'id', vni]
if group:
cmd.extend(['group', group])
if dev:
cmd.extend(['dev', dev])
if ttl:
cmd.extend(['ttl', ttl])
if tos:
cmd.extend(['tos', tos])
if local:
cmd.extend(['local', local])
if proxy:
cmd.append('proxy')
# tuple: min,max
if port and len(port) == 2:
cmd.extend(['port', port[0], port[1]])
elif port:
raise exceptions.NetworkVxlanPortRangeError(vxlan_range=port)
self._as_root('', 'link', cmd)
return (IPDevice(name, self.root_helper, self.namespace))
@classmethod
def get_namespaces(cls, root_helper):
output = cls._execute('', 'netns', ('list',), root_helper=root_helper)
return [l.strip() for l in output.split('\n')]
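# Illustrative usage sketch (not part of this module); the namespace and
# device names are assumptions:
#
#     ip = IPWrapper(root_helper='sudo')
#     ns = ip.ensure_namespace('qrouter-example')  # hypothetical namespace
#     tap = ns.add_tuntap('tap-example')           # device created inside it
#     tap.link.set_up()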
class IpRule(IPWrapper):
def add_rule_from(self, ip, table, rule_pr):
args = ['add', 'from', ip, 'lookup', table, 'priority', rule_pr]
ip = self._as_root('', 'rule', tuple(args))
return ip
def delete_rule_priority(self, rule_pr):
args = ['del', 'priority', rule_pr]
ip = self._as_root('', 'rule', tuple(args))
return ip
class IPDevice(SubProcessBase):
def __init__(self, name, root_helper=None, namespace=None):
super(IPDevice, self).__init__(root_helper=root_helper,
namespace=namespace)
self.name = name
self.link = IpLinkCommand(self)
self.addr = IpAddrCommand(self)
self.route = IpRouteCommand(self)
self.neigh = IpNeighCommand(self)
def __eq__(self, other):
return (other is not None and self.name == other.name
and self.namespace == other.namespace)
def __str__(self):
return self.name
class IpCommandBase(object):
COMMAND = ''
def __init__(self, parent):
self._parent = parent
def _run(self, *args, **kwargs):
return self._parent._run(kwargs.get('options', []), self.COMMAND, args)
def _as_root(self, *args, **kwargs):
return self._parent._as_root(kwargs.get('options', []),
self.COMMAND,
args,
kwargs.get('use_root_namespace', False))
class IpDeviceCommandBase(IpCommandBase):
@property
def name(self):
return self._parent.name
class IpLinkCommand(IpDeviceCommandBase):
COMMAND = 'link'
def set_address(self, mac_address):
self._as_root('set', self.name, 'address', mac_address)
def set_mtu(self, mtu_size):
self._as_root('set', self.name, 'mtu', mtu_size)
def set_up(self):
self._as_root('set', self.name, 'up')
def set_down(self):
self._as_root('set', self.name, 'down')
def set_netns(self, namespace):
self._as_root('set', self.name, 'netns', namespace)
self._parent.namespace = namespace
def set_name(self, name):
self._as_root('set', self.name, 'name', name)
self._parent.name = name
def set_alias(self, alias_name):
self._as_root('set', self.name, 'alias', alias_name)
def delete(self):
self._as_root('delete', self.name)
@property
def address(self):
return self.attributes.get('link/ether')
@property
def state(self):
return self.attributes.get('state')
@property
def mtu(self):
return self.attributes.get('mtu')
@property
def qdisc(self):
return self.attributes.get('qdisc')
@property
def qlen(self):
return self.attributes.get('qlen')
@property
def alias(self):
return self.attributes.get('alias')
@property
def attributes(self):
return self._parse_line(self._run('show', self.name, options='o'))
def _parse_line(self, value):
if not value:
return {}
device_name, settings = value.replace("\\", '').split('>', 1)
tokens = settings.split()
keys = tokens[::2]
values = [int(v) if v.isdigit() else v for v in tokens[1::2]]
retval = dict(zip(keys, values))
return retval
class IpAddrCommand(IpDeviceCommandBase):
COMMAND = 'addr'
def add(self, ip_version, cidr, broadcast, scope='global'):
self._as_root('add',
cidr,
'brd',
broadcast,
'scope',
scope,
'dev',
self.name,
options=[ip_version])
def delete(self, ip_version, cidr):
self._as_root('del',
cidr,
'dev',
self.name,
options=[ip_version])
def flush(self):
self._as_root('flush', self.name)
def list(self, scope=None, to=None, filters=None):
if filters is None:
filters = []
retval = []
if scope:
filters += ['scope', scope]
if to:
filters += ['to', to]
for line in self._run('show', self.name, *filters).split('\n'):
line = line.strip()
if not line.startswith('inet'):
continue
parts = line.split()
if parts[0] == 'inet6':
version = 6
scope = parts[3]
broadcast = '::'
else:
version = 4
if parts[2] == 'brd':
broadcast = parts[3]
scope = parts[5]
else:
# sometimes output of 'ip a' might look like:
# inet 192.168.100.100/24 scope global eth0
# and broadcast needs to be calculated from CIDR
broadcast = str(netaddr.IPNetwork(parts[1]).broadcast)
scope = parts[3]
retval.append(dict(cidr=parts[1],
broadcast=broadcast,
scope=scope,
ip_version=version,
dynamic=('dynamic' == parts[-1])))
return retval
class IpRouteCommand(IpDeviceCommandBase):
COMMAND = 'route'
def add_gateway(self, gateway, metric=None, table=None):
args = ['replace', 'default', 'via', gateway]
if metric:
args += ['metric', metric]
args += ['dev', self.name]
if table:
args += ['table', table]
self._as_root(*args)
def delete_gateway(self, gateway=None, table=None):
args = ['del', 'default']
if gateway:
args += ['via', gateway]
args += ['dev', self.name]
if table:
args += ['table', table]
self._as_root(*args)
def list_onlink_routes(self):
def iterate_routes():
output = self._run('list', 'dev', self.name, 'scope', 'link')
for line in output.split('\n'):
line = line.strip()
if line and not line.count('src'):
yield line
return [x for x in iterate_routes()]
def add_onlink_route(self, cidr):
self._as_root('replace', cidr, 'dev', self.name, 'scope', 'link')
def delete_onlink_route(self, cidr):
self._as_root('del', cidr, 'dev', self.name, 'scope', 'link')
def get_gateway(self, scope=None, filters=None):
if filters is None:
filters = []
retval = None
if scope:
filters += ['scope', scope]
route_list_lines = self._run('list', 'dev', self.name,
*filters).split('\n')
default_route_line = next((x.strip() for x in
route_list_lines if
x.strip().startswith('default')), None)
if default_route_line:
gateway_index = 2
parts = default_route_line.split()
retval = dict(gateway=parts[gateway_index])
if 'metric' in parts:
metric_index = parts.index('metric') + 1
retval.update(metric=int(parts[metric_index]))
return retval
def pullup_route(self, interface_name):
"""Ensures that the route entry for the interface is before all
others on the same subnet.
"""
device_list = []
device_route_list_lines = self._run('list', 'proto', 'kernel',
'dev', interface_name).split('\n')
for device_route_line in device_route_list_lines:
try:
subnet = device_route_line.split()[0]
except Exception:
continue
subnet_route_list_lines = self._run('list', 'proto', 'kernel',
'match', subnet).split('\n')
for subnet_route_line in subnet_route_list_lines:
i = iter(subnet_route_line.split())
while(i.next() != 'dev'):
pass
device = i.next()
try:
while(i.next() != 'src'):
pass
src = i.next()
except Exception:
src = ''
if device != interface_name:
device_list.append((device, src))
else:
break
for (device, src) in device_list:
self._as_root('del', subnet, 'dev', device)
if (src != ''):
self._as_root('append', subnet, 'proto', 'kernel',
'src', src, 'dev', device)
else:
self._as_root('append', subnet, 'proto', 'kernel',
'dev', device)
def add_route(self, cidr, ip, table=None):
args = ['replace', cidr, 'via', ip, 'dev', self.name]
if table:
args += ['table', table]
self._as_root(*args)
def delete_route(self, cidr, ip, table=None):
args = ['del', cidr, 'via', ip, 'dev', self.name]
if table:
args += ['table', table]
self._as_root(*args)
class IpNeighCommand(IpDeviceCommandBase):
COMMAND = 'neigh'
def add(self, ip_version, ip_address, mac_address):
self._as_root('replace',
ip_address,
'lladdr',
mac_address,
'nud',
'permanent',
'dev',
self.name,
options=[ip_version])
def delete(self, ip_version, ip_address, mac_address):
self._as_root('del',
ip_address,
'lladdr',
mac_address,
'dev',
self.name,
options=[ip_version])
class IpNetnsCommand(IpCommandBase):
COMMAND = 'netns'
def add(self, name):
self._as_root('add', name, use_root_namespace=True)
wrapper = IPWrapper(self._parent.root_helper, name)
wrapper.netns.execute(['sysctl', '-w',
'net.ipv4.conf.all.promote_secondaries=1'])
return wrapper
def delete(self, name):
self._as_root('delete', name, use_root_namespace=True)
def execute(self, cmds, addl_env=None, check_exit_code=True,
extra_ok_codes=None):
ns_params = []
if self._parent.namespace:
self._parent.enforce_root_helper()
ns_params = ['ip', 'netns', 'exec', self._parent.namespace]
env_params = []
if addl_env:
env_params = (['env'] +
['%s=%s' % pair for pair in addl_env.items()])
return utils.execute(
ns_params + env_params + list(cmds),
root_helper=self._parent.root_helper,
check_exit_code=check_exit_code, extra_ok_codes=extra_ok_codes)
def exists(self, name):
output = self._parent._execute('o', 'netns', ['list'])
for line in output.split('\n'):
if name == line.strip():
return True
return False
def device_exists(device_name, root_helper=None, namespace=None):
"""Return True if the device exists in the namespace."""
try:
dev = IPDevice(device_name, root_helper, namespace)
dev.set_log_fail_as_error(False)
address = dev.link.address
except RuntimeError:
return False
return bool(address)
def device_exists_with_ip_mac(device_name, ip_cidr, mac, namespace=None,
root_helper=None):
"""Return True if the device with the given IP and MAC addresses
exists in the namespace.
"""
try:
device = IPDevice(device_name, root_helper, namespace)
if mac != device.link.address:
return False
if ip_cidr not in (ip['cidr'] for ip in device.addr.list()):
return False
except RuntimeError:
return False
else:
return True
def ensure_device_is_ready(device_name, root_helper=None, namespace=None):
dev = IPDevice(device_name, root_helper, namespace)
dev.set_log_fail_as_error(False)
try:
# Ensure the device is up, even if it is already up. If the device
# doesn't exist, a RuntimeError will be raised.
dev.link.set_up()
except RuntimeError:
return False
return True
def iproute_arg_supported(command, arg, root_helper=None):
command += ['help']
stdout, stderr = utils.execute(command, root_helper=root_helper,
check_exit_code=False, return_stderr=True)
return any(arg in line for line in stderr.split('\n'))<|fim▁end|>
|
# Copyright 2012 OpenStack Foundation
|
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>"""
Configuration for the ``student`` Django application.
"""
from __future__ import absolute_import
from django.apps import AppConfig
from django.contrib.auth.signals import user_logged_in
class StudentConfig(AppConfig):
"""
Default configuration for the ``student`` application.
"""
name = 'student'
def ready(self):<|fim▁hole|><|fim▁end|>
|
from django.contrib.auth.models import update_last_login as django_update_last_login
user_logged_in.disconnect(django_update_last_login)
from .signals.receivers import update_last_login
user_logged_in.connect(update_last_login)
|
<|file_name|>CachedProperty.py<|end_file_name|><|fim▁begin|># NOTE: this should inherit from (object) to function correctly with python 2.7
class CachedProperty(object):
""" A property that is only computed once per instance and
then stores the result in _cached_properties of the object.
Source: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
"""
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
propname = self.func.__name__
if not hasattr(obj, '_cached_properties'):
obj._cached_properties = {}
if propname not in obj._cached_properties:
obj._cached_properties[propname] = self.func(obj)
# value = obj.__dict__[propname] = self.func(obj)
return obj._cached_properties[propname]
@staticmethod
def clear(obj):
"""clears cache of obj"""
if hasattr(obj, '_cached_properties'):
obj._cached_properties = {}
@staticmethod
def is_cached(obj, propname):
if hasattr(obj, '_cached_properties') and propname in obj._cached_properties:<|fim▁hole|><|fim▁end|>
|
return True
else:
return False
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Copyright (c) 2012-2016 Ben Croston
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,<|fim▁hole|>OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from RPi._GPIO import *
VERSION = '0.6.3'<|fim▁end|>
| |
<|file_name|>angles.py<|end_file_name|><|fim▁begin|>"""Angles and anomalies.
"""
from astropy import units as u
from poliastro.core.angles import (
D_to_M as D_to_M_fast,
D_to_nu as D_to_nu_fast,
E_to_M as E_to_M_fast,
E_to_nu as E_to_nu_fast,
F_to_M as F_to_M_fast,
F_to_nu as F_to_nu_fast,
M_to_D as M_to_D_fast,
M_to_E as M_to_E_fast,
M_to_F as M_to_F_fast,
fp_angle as fp_angle_fast,
nu_to_D as nu_to_D_fast,
nu_to_E as nu_to_E_fast,
nu_to_F as nu_to_F_fast,
)
@u.quantity_input(D=u.rad)
def D_to_nu(D):
"""True anomaly from parabolic eccentric anomaly.
Parameters
----------
D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
Notes
-----
Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
"""
return (D_to_nu_fast(D.to_value(u.rad)) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad)
def nu_to_D(nu):
"""Parabolic eccentric anomaly from true anomaly.
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
Returns
-------
D : ~astropy.units.Quantity
        Parabolic eccentric anomaly.
Notes
-----
Taken from Farnocchia, Davide, Davide Bracali Cioci, and Andrea Milani.
"Robust resolution of Kepler’s equation in all eccentricity regimes."
Celestial Mechanics and Dynamical Astronomy 116, no. 1 (2013): 21-34.
"""
return (nu_to_D_fast(nu.to_value(u.rad)) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_E(nu, ecc):
"""Eccentric anomaly from true anomaly.
.. versionadded:: 0.4.0
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
E : ~astropy.units.Quantity
Eccentric anomaly.
"""
return (nu_to_E_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def nu_to_F(nu, ecc):
"""Hyperbolic eccentric anomaly from true anomaly.
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
Notes
-----
Taken from Curtis, H. (2013). *Orbital mechanics for engineering students*. 167
"""
return (nu_to_F_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_nu(E, ecc):
"""True anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : ~astropy.units.Quantity
Eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (E_to_nu_fast(E.to_value(u.rad), ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_nu(F, ecc):
"""True anomaly from hyperbolic eccentric anomaly.
Parameters
----------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
nu : ~astropy.units.Quantity
True anomaly.
"""
return (F_to_nu_fast(F.to_value(u.rad), ecc.value) * u.rad).to(F.unit)
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
E : ~astropy.units.Quantity
Eccentric anomaly.
"""
return (M_to_E_fast(M.to_value(u.rad), ecc.value) * u.rad).to(M.unit)
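# Illustrative usage sketch (example values are assumptions):
#
#     from astropy import units as u
#     E = M_to_E(30 * u.deg, 0.1 * u.one)  # solve Kepler's equation M -> E
#     nu = E_to_nu(E, 0.1 * u.one)         # eccentric -> true anomaly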
@u.quantity_input(M=u.rad, ecc=u.one)
def M_to_F(M, ecc):
"""Hyperbolic eccentric anomaly from mean anomaly.
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
Returns
-------
F : ~astropy.units.Quantity
Hyperbolic eccentric anomaly.
"""
return (M_to_F_fast(M.to_value(u.rad), ecc.value) * u.rad).to(M.unit)
@u.quantity_input(M=u.rad)
def M_to_D(M):
"""Parabolic eccentric anomaly from mean anomaly.
Parameters
----------
M : ~astropy.units.Quantity
Mean anomaly.
Returns
-------
D : ~astropy.units.Quantity
Parabolic eccentric anomaly.
"""
return (M_to_D_fast(M.to_value(u.rad)) * u.rad).to(M.unit)
@u.quantity_input(E=u.rad, ecc=u.one)
def E_to_M(E, ecc):
"""Mean anomaly from eccentric anomaly.
.. versionadded:: 0.4.0
Parameters
----------
E : ~astropy.units.Quantity
Eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (E_to_M_fast(E.to_value(u.rad), ecc.value) * u.rad).to(E.unit)
@u.quantity_input(F=u.rad, ecc=u.one)
def F_to_M(F, ecc):
"""Mean anomaly from eccentric anomaly.
Parameters
----------
F : ~astropy.units.Quantity<|fim▁hole|> Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (F_to_M_fast(F.to_value(u.rad), ecc.value) * u.rad).to(F.unit)
@u.quantity_input(D=u.rad)
def D_to_M(D):
"""Mean anomaly from eccentric anomaly.
Parameters
----------
D : ~astropy.units.Quantity
Parabolic eccentric anomaly.
Returns
-------
M : ~astropy.units.Quantity
Mean anomaly.
"""
return (D_to_M_fast(D.to_value(u.rad)) * u.rad).to(D.unit)
@u.quantity_input(nu=u.rad, ecc=u.one)
def fp_angle(nu, ecc):
"""Flight path angle.
.. versionadded:: 0.4.0
Parameters
----------
nu : ~astropy.units.Quantity
True anomaly.
ecc : ~astropy.units.Quantity
Eccentricity.
Notes
-----
Algorithm taken from Vallado 2007, pp. 113.
"""
return (fp_angle_fast(nu.to_value(u.rad), ecc.value) * u.rad).to(nu.unit)<|fim▁end|>
|
Hyperbolic eccentric anomaly.
ecc : ~astropy.units.Quantity
Eccentricity (>1).
|
<|file_name|>palette-sort.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gimpfu import *
# little known, colorsys is part of Python's stdlib
from colorsys import rgb_to_yiq
from textwrap import dedent
from random import randint
gettext.install("gimp20-python", gimp.locale_directory, unicode=True)
AVAILABLE_CHANNELS = (_("Red"), _("Green"), _("Blue"),
_("Luma (Y)"),
_("Hue"), _("Saturation"), _("Value"),
_("Saturation (HSL)"), _("Lightness (HSL)"),
_("Index"),
_("Random"))
GRAIN_SCALE = (1.0, 1.0 , 1.0,
1.0,
360., 100., 100.,
100., 100.,
16384.,
float(0x7ffffff),
100., 256., 256.,
256., 360.,)
SELECT_ALL = 0
SELECT_SLICE = 1
SELECT_AUTOSLICE = 2
SELECT_PARTITIONED = 3
SELECTIONS = (SELECT_ALL, SELECT_SLICE, SELECT_AUTOSLICE, SELECT_PARTITIONED)
def noop(v, i):
return v
def to_hsv(v, i):
return v.to_hsv()
def to_hsl(v, i):
return v.to_hsl()
def to_yiq(v, i):
return rgb_to_yiq(*v[:-1])
def to_index(v, i):
return (i,)
def to_random(v, i):
return (randint(0, 0x7fffffff),)
channel_getters = [ (noop, 0), (noop, 1), (noop, 2),
(to_yiq, 0),
(to_hsv, 0), (to_hsv, 1), (to_hsv, 2),
(to_hsl, 1), (to_hsl, 2),
(to_index, 0),
(to_random, 0)]
try:
from colormath.color_objects import RGBColor, LabColor, LCHabColor
AVAILABLE_CHANNELS = AVAILABLE_CHANNELS + (_("Lightness (LAB)"),
_("A-color"), _("B-color"),
_("Chroma (LCHab)"),
_("Hue (LCHab)"))
to_lab = lambda v,i: RGBColor(*v[:-1]).convert_to('LAB').get_value_tuple()
to_lchab = (lambda v,i:
RGBColor(*v[:-1]).convert_to('LCHab').get_value_tuple())
channel_getters.extend([(to_lab, 0), (to_lab, 1), (to_lab, 2),
(to_lchab, 1), (to_lchab, 2)])
except ImportError:
pass
def parse_slice(s, numcolors):
"""Parse a slice spec and return (start, nrows, length)
All items are optional. Omitting them makes the largest possible selection that
exactly fits the other items.
start:nrows,length
'' selects all items, as does ':'
':4,' makes a 4-row selection out of all colors (length auto-determined)
':4' also.
':1,4' selects the first 4 colors
':,4' selects rows of 4 colors (nrows auto-determined)
':4,4' selects 4 rows of 4 colors
'4:' selects a single row of all colors after 4, inclusive.
'4:,4' selects rows of 4 colors, starting at 4 (nrows auto-determined)
'4:4,4' selects 4 rows of 4 colors (16 colors total), beginning at index 4.
'4' is illegal (ambiguous)
In general, slices are comparable to a numpy sub-array.
'start at element START, with shape (NROWS, LENGTH)'
"""
s = s.strip()
def notunderstood():
raise ValueError('Slice %r not understood. Should be in format'
' START?:NROWS?,ROWLENGTH? eg. "0:4,16".' % s)
def _int(v):
try:
return int(v)
except ValueError:
notunderstood()
if s in ('', ':', ':,'):
return 0, 1, numcolors # entire palette, one row
if s.count(':') != 1:
notunderstood()
rowpos = s.find(':')
start = 0
if rowpos > 0:
start = _int(s[:rowpos])
numcolors -= start
nrows = 1
if ',' in s:
commapos = s.find(',')
nrows = s[rowpos+1:commapos]
length = s[commapos+1:]
if not nrows:
if not length:
notunderstood()
else:
length = _int(length)
if length == 0:
notunderstood()
nrows = numcolors // length
if numcolors % length:
nrows = -nrows
elif not length:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
else:
nrows = _int(nrows)
if nrows == 0:
notunderstood()
length = _int(length)
if length == 0:
notunderstood()
else:
        # Handle '4:' (no row count given): a single row of everything after start.
        nrows = _int(s[rowpos+1:]) if s[rowpos+1:] else 1
if nrows == 0:
notunderstood()
length = numcolors // nrows
if numcolors % nrows:
length = -length
return start, nrows, length
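# Illustrative results, derived from the docstring above (assuming a
# 256-color palette):
#
#     parse_slice('', 256)      # -> (0, 1, 256)  whole palette, one row
#     parse_slice(':4,4', 256)  # -> (0, 4, 4)    4 rows of 4 from index 0
#     parse_slice('4:,4', 256)  # -> (4, 63, 4)   rows of 4 starting at 4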
def quantization_grain(channel, g):
"Given a channel and a quantization, return the size of a quantization grain"
g = max(1.0, g)
if g <= 1.0:
g = 0.00001
else:
g = max(0.00001, GRAIN_SCALE[channel] / g)
return g
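# Illustrative example (not from the original source): the Hue channel is
# index 4 with scale 360., so a quantization of 10.0 gives a 36-degree grain:
#
#     quantization_grain(4, 10.0)  # -> 36.0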
def palette_sort(palette, selection, slice_expr, channel1, ascending1,
channel2, ascending2, quantize, pchannel, pquantize):
grain1 = quantization_grain(channel1, quantize)
grain2 = quantization_grain(channel2, quantize)
pgrain = quantization_grain(pchannel, pquantize)
#If palette is read only, work on a copy:
editable = pdb.gimp_palette_is_editable(palette)
if not editable:
palette = pdb.gimp_palette_duplicate (palette)
num_colors = pdb.gimp_palette_get_info (palette)
start, nrows, length = None, None, None
if selection == SELECT_AUTOSLICE:
def find_index(color, startindex=0):
for i in range(startindex, num_colors):
c = pdb.gimp_palette_entry_get_color (palette, i)
if c == color:
return i
return None
def hexcolor(c):
return "#%02x%02x%02x" % tuple(c[:-1])
fg = pdb.gimp_context_get_foreground()
bg = pdb.gimp_context_get_background()
start = find_index(fg)
end = find_index(bg)
if start is None:
raise ValueError("Couldn't find foreground color %r in palette" % list(fg))
if end is None:
raise ValueError("Couldn't find background color %r in palette" % list(bg))
if find_index(fg, start + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(fg))
if find_index(bg, end + 1):
raise ValueError('Autoslice cannot be used when more than one'
' instance of an endpoint'
' (%s) is present' % hexcolor(bg))
if start > end:
end, start = start, end
length = (end - start) + 1
try:
_, nrows, _ = parse_slice(slice_expr, length)
nrows = abs(nrows)
if length % nrows:
raise ValueError('Total length %d not evenly divisible'
' by number of rows %d' % (length, nrows))
length /= nrows
except ValueError:
# bad expression is okay here, just assume one row
nrows = 1
# remaining behaviour is implemented by SELECT_SLICE 'inheritance'.
selection= SELECT_SLICE
elif selection in (SELECT_SLICE, SELECT_PARTITIONED):
start, nrows, length = parse_slice(slice_expr, num_colors)
channels_getter_1, channel_index = channel_getters[channel1]
channels_getter_2, channel2_index = channel_getters[channel2]
def get_colors(start, end):
result = []
for i in range(start, end):
entry = (pdb.gimp_palette_entry_get_name (palette, i),
pdb.gimp_palette_entry_get_color (palette, i))
index1 = channels_getter_1(entry[1], i)[channel_index]
index2 = channels_getter_2(entry[1], i)[channel2_index]
index = ((index1 - (index1 % grain1)) * (1 if ascending1 else -1),
(index2 - (index2 % grain2)) * (1 if ascending2 else -1)
)
result.append((index, entry))
return result
if selection == SELECT_ALL:
entry_list = get_colors(0, num_colors)
entry_list.sort(key=lambda v:v[0])
for i in range(num_colors):
pdb.gimp_palette_entry_set_name (palette, i, entry_list[i][1][0])
pdb.gimp_palette_entry_set_color (palette, i, entry_list[i][1][1])
elif selection == SELECT_PARTITIONED:
if num_colors < (start + length * nrows) - 1:
raise ValueError('Not enough entries in palette to '
'sort complete rows! Got %d, expected >=%d' %
(num_colors, start + length * nrows))
pchannels_getter, pchannel_index = channel_getters[pchannel]
for row in range(nrows):
partition_spans = [1]
rowstart = start + (row * length)
old_color = pdb.gimp_palette_entry_get_color (palette,
rowstart)
old_partition = pchannels_getter(old_color, rowstart)[pchannel_index]
old_partition = old_partition - (old_partition % pgrain)
for i in range(rowstart + 1, rowstart + length):
this_color = pdb.gimp_palette_entry_get_color (palette, i)
this_partition = pchannels_getter(this_color, i)[pchannel_index]
this_partition = this_partition - (this_partition % pgrain)
if this_partition == old_partition:
partition_spans[-1] += 1
else:
partition_spans.append(1)
old_partition = this_partition
base = rowstart
for size in partition_spans:
                palette_sort(palette, SELECT_SLICE, '%d:1,%d' % (base, size),
                             channel1, ascending1, channel2, ascending2,
                             quantize, 0, 1.0)
base += size
else:
stride = length
if num_colors < (start + stride * nrows) - 1:
raise ValueError('Not enough entries in palette to sort '
'complete rows! Got %d, expected >=%d' %
(num_colors, start + stride * nrows))
for row_start in range(start, start + stride * nrows, stride):
            sublist = get_colors(row_start, row_start + stride)<|fim▁hole|>
            # Sort ascending; direction is already encoded in the index signs.
            sublist.sort(key=lambda v:v[0])
for i, entry in zip(range(row_start, row_start + stride), sublist):
pdb.gimp_palette_entry_set_name (palette, i, entry[1][0])
pdb.gimp_palette_entry_set_color (palette, i, entry[1][1])
return palette
register(
"python-fu-palette-sort",
N_("Sort the colors in a palette"),
    # FIXME: Write human-readable help -
    # (I can't figure out what the plugin does, or how to use the parameters after
    # David's enhancements, even looking at the code -
# let alone someone just using GIMP (JS) )
dedent("""\
palette_sort (palette, selection, slice_expr, channel,
channel2, quantize, ascending, pchannel, pquantize) -> new_palette
Sorts a palette, or part of a palette, using several options.
One can select two color channels over which to sort,
and several auxiliary parameters create a 2D sorted
palette with sorted rows, among other things.
One can optionally install colormath
(https://pypi.python.org/pypi/colormath/1.0.8)
to GIMP's Python to get even more channels to choose from.
"""),
"João S. O. Bueno, Carol Spears, David Gowers",
"João S. O. Bueno, Carol Spears, David Gowers",
"2006-2014",
N_("_Sort Palette..."),
"",
[
(PF_PALETTE, "palette", _("Palette"), ""),
(PF_OPTION, "selections", _("Se_lections"), SELECT_ALL,
(_("All"), _("Slice / Array"), _("Autoslice (fg->bg)"),
_("Partitioned"))),
(PF_STRING, "slice-expr", _("Slice _expression"), ''),
(PF_OPTION, "channel1", _("Channel to _sort"), 3,
AVAILABLE_CHANNELS),
(PF_BOOL, "ascending1", _("_Ascending"), True),
(PF_OPTION, "channel2", _("Secondary Channel to s_ort"), 5,
AVAILABLE_CHANNELS),
(PF_BOOL, "ascending2", _("_Ascending"), True),
(PF_FLOAT, "quantize", _("_Quantization"), 0.0),
(PF_OPTION, "pchannel", _("_Partitioning channel"), 3,
AVAILABLE_CHANNELS),
(PF_FLOAT, "pquantize", _("Partition q_uantization"), 0.0),
],
[],
palette_sort,
menu="<Palettes>",
domain=("gimp20-python", gimp.locale_directory)
)
main ()<|fim▁end|>
| |
<|file_name|>migrate.js<|end_file_name|><|fim▁begin|>var assert = require("chai").assert;
var Init = require("truffle-init");
var Migrate = require("truffle-migrate");
var Contracts = require("../lib/contracts");
var Networks = require("../lib/networks");
var path = require("path");
var fs = require("fs");
var TestRPC = require("ethereumjs-testrpc");
var Resolver = require("truffle-resolver");
var Artifactor = require("truffle-artifactor");
var Web3 = require("web3");
describe("migrate", function() {
var config;
var accounts;
var network_id_one;
var network_id_two;
var from_addresses = [];
before("Create a sandbox", function(done) {
this.timeout(10000);
Init.sandbox(function(err, result) {
if (err) return done(err);
config = result;
config.resolver = new Resolver(config);
config.artifactor = new Artifactor(config.contracts_build_directory);
config.networks = {};
done();
});
});
function createProviderAndSetNetworkConfig(network, callback) {
var provider = TestRPC.provider({seed: network});
var web3 = new Web3(provider);
web3.eth.getAccounts(function(err, accs) {
if (err) return callback(err);
web3.version.getNetwork(function(err, network_id) {
if (err) return callback(err);
config.networks[network] = {
provider: provider,
network_id: network_id + "",
from: accs[0]
};
callback();<|fim▁hole|>
before("Get accounts and network id of network one", function(done) {
createProviderAndSetNetworkConfig("primary", done);
});
before("Get accounts and network id of network one", function(done) {
createProviderAndSetNetworkConfig("secondary", done);
});
it('profiles a new project as not having any contracts deployed', function(done) {
Networks.deployed(config, function(err, networks) {
if (err) return done(err);
assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
assert.equal(Object.keys(networks["primary"]), 0, "Primary network should not have been deployed to");
assert.equal(Object.keys(networks["secondary"]), 0, "Secondary network should not have been deployed to");
done();
})
});
it('links libraries in initial project, and runs all migrations', function(done) {
this.timeout(10000);
config.network = "primary";
Contracts.compile(config.with({
all: false,
quiet: true
}), function(err, contracts) {
if (err) return done(err);
Migrate.run(config.with({
quiet: true
}), function(err) {
if (err) return done(err);
Networks.deployed(config, function(err, networks) {
if (err) return done(err);
assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
assert.equal(Object.keys(networks["primary"]).length, 3, "Primary network should have three contracts deployed");
assert.isNotNull(networks["primary"]["MetaCoin"], "MetaCoin contract should have an address");
assert.isNotNull(networks["primary"]["ConvertLib"], "ConvertLib library should have an address");
assert.isNotNull(networks["primary"]["Migrations"], "Migrations contract should have an address");
assert.equal(Object.keys(networks["secondary"]), 0, "Secondary network should not have been deployed to");
done();
});
});
});
});
it('should migrate secondary network without altering primary network', function(done) {
this.timeout(10000);
config.network = "secondary";
var currentAddresses = {};
Networks.deployed(config, function(err, networks) {
if (err) return done(err);
["MetaCoin", "ConvertLib", "Migrations"].forEach(function(contract_name) {
currentAddresses[contract_name] = networks["primary"][contract_name];
});
Migrate.run(config.with({
quiet: true
}), function(err, contracts) {
if (err) return done(err);
Networks.deployed(config, function(err, networks) {
if (err) return done(err);
assert.equal(Object.keys(networks).length, 2, "Should have results for two networks from profiler");
assert.equal(Object.keys(networks["primary"]).length, 3, "Primary network should have three contracts deployed");
assert.equal(networks["primary"]["MetaCoin"], currentAddresses["MetaCoin"], "MetaCoin contract updated on primary network");
assert.equal(networks["primary"]["ConvertLib"], currentAddresses["ConvertLib"], "ConvertLib library updated on primary network");
assert.equal(networks["primary"]["Migrations"], currentAddresses["Migrations"], "Migrations contract updated on primary network");
assert.equal(Object.keys(networks["secondary"]).length, 3, "Secondary network should have three contracts deployed");
assert.isNotNull(networks["secondary"]["MetaCoin"], "MetaCoin contract should have an address on secondary network");
assert.isNotNull(networks["secondary"]["ConvertLib"], "ConvertLib library should have an address on secondary network");
assert.isNotNull(networks["secondary"]["Migrations"], "Migrations contract should have an address on secondary network");
Object.keys(networks["primary"]).forEach(function(contract_name) {
assert.notEqual(networks["secondary"][contract_name], networks["primary"][contract_name], "Contract " + contract_name + " has the same address on both networks")
});
done();
});
});
});
});
it("should ignore files that don't start with a number", function(done) {
fs.writeFileSync(path.join(config.migrations_directory, "~2_deploy_contracts.js"), "module.exports = function() {};", "utf8");
Migrate.assemble(config, function(err, migrations) {
if (err) return done(err);
assert.equal(migrations.length, 2, "~2_deploy_contracts.js should have been ignored!");
done();
});
});
it("should ignore non-js extensions", function(done) {
fs.writeFileSync(path.join(config.migrations_directory, "2_deploy_contracts.js~"), "module.exports = function() {};", "utf8");
Migrate.assemble(config, function(err, migrations) {
if (err) return done(err);
assert.equal(migrations.length, 2, "2_deploy_contracts.js~ should have been ignored!");
done();
});
});
});<|fim▁end|>
|
});
});
};
|
<|file_name|>requestlog.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import mozhttpd
import urllib2
import os
import unittest
import mozunit
here = os.path.dirname(os.path.abspath(__file__))
class RequestLogTest(unittest.TestCase):<|fim▁hole|> httpd.start(block=False)
url = "http://%s:%s/" % ('127.0.0.1', httpd.httpd.server_port)
f = urllib2.urlopen(url)
f.read()
return httpd.request_log
def test_logging_enabled(self):
request_log = self.check_logging(log_requests=True)
self.assertEqual(len(request_log), 1)
log_entry = request_log[0]
self.assertEqual(log_entry['method'], 'GET')
self.assertEqual(log_entry['path'], '/')
self.assertEqual(type(log_entry['time']), float)
def test_logging_disabled(self):
request_log = self.check_logging(log_requests=False)
self.assertEqual(len(request_log), 0)
if __name__ == '__main__':
mozunit.main()<|fim▁end|>
|
def check_logging(self, log_requests=False):
httpd = mozhttpd.MozHttpd(port=0, docroot=here, log_requests=log_requests)
|
<|file_name|>block_depth.py<|end_file_name|><|fim▁begin|>"""
Block Depth Transformer
"""
from __future__ import absolute_import
from openedx.core.djangoapps.content.block_structure.transformer import BlockStructureTransformer<|fim▁hole|> """
Keep track of the depth of each block within the block structure. In case
of multiple paths to a given node (in a DAG), use the shallowest depth.
"""
WRITE_VERSION = 1
READ_VERSION = 1
BLOCK_DEPTH = 'block_depth'
def __init__(self, requested_depth=None):
self.requested_depth = requested_depth
@classmethod
def name(cls):
return "blocks_api:block_depth"
@classmethod
def get_block_depth(cls, block_structure, block_key):
"""
Return the precalculated depth of a block within the block_structure:
Arguments:
block_structure: a BlockStructure instance
block_key: the key of the block whose depth we want to know
Returns:
int
"""
return block_structure.get_transformer_block_field(
block_key,
cls,
cls.BLOCK_DEPTH,
)
def transform(self, usage_info, block_structure):
"""
Mutates block_structure based on the given usage_info.
"""
for block_key in block_structure.topological_traversal():
parents = block_structure.get_parents(block_key)
if parents:
block_depth = min(
self.get_block_depth(block_structure, parent_key)
for parent_key in parents
) + 1
else:
block_depth = 0
block_structure.set_transformer_block_field(
block_key,
self,
self.BLOCK_DEPTH,
block_depth
)
if self.requested_depth is not None:
block_structure.remove_block_traversal(
lambda block_key: self.get_block_depth(block_structure, block_key) > self.requested_depth
)<|fim▁end|>
|
class BlockDepthTransformer(BlockStructureTransformer):
|
<|file_name|>hints_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for checks."""
from grr.lib import flags
from grr.lib import test_lib
from grr.lib.checks import hints
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import config_file as rdf_config_file
from grr.lib.rdfvalues import protodict as rdf_protodict
class HintsTests(test_lib.GRRBaseTest):<|fim▁hole|>
    """Overlay(hint1, hint2) should populate hint1 with the values of hint2."""
# Fully populated hint.
full = {
"problem": "Terminator needs trousers.\n",
"fix": "Give me your clothes.\n",
"format": "{mission}, {target}\n",
"summary": "I'll be back."
}
# Partial hint
partial = {
"problem": "Terminator needs to go shopping.",
"fix": "Phased plasma rifle in the 40-watt range.",
"format": "",
"summary": ""
}
# Partial overlaid with full.
overlay = {
"problem": "Terminator needs to go shopping.",
"fix": "Phased plasma rifle in the 40-watt range.",
"format": "{mission}, {target}",
"summary": "I'll be back."
}
# Empty hint.
empty = {"problem": "", "fix": "", "format": "", "summary": ""}
# Empty hint should not clobber populated hint.
starts_full = full.copy()
starts_empty = empty.copy()
hints.Overlay(starts_full, starts_empty)
self.assertDictEqual(full, starts_full)
self.assertDictEqual(empty, starts_empty)
# Populate empty hint from partially populated hint.
starts_partial = partial.copy()
starts_empty = empty.copy()
hints.Overlay(starts_empty, starts_partial)
self.assertDictEqual(partial, starts_partial)
self.assertDictEqual(partial, starts_empty)
# Overlay the full and partial hints to get the hybrid.
starts_full = full.copy()
starts_partial = partial.copy()
hints.Overlay(starts_partial, starts_full)
self.assertDictEqual(full, starts_full)
self.assertDictEqual(overlay, starts_partial)
def testRdfFormatter(self):
"""Hints format RDF values with arbitrary values and attributes."""
# Create a complex RDF value
rdf = rdf_client.ClientSummary()
rdf.system_info.system = "Linux"
rdf.system_info.node = "coreai.skynet.com"
# Users (repeated)
rdf.users = [rdf_client.User(username=u) for u in ("root", "jconnor")]
# Interface (nested, repeated)
addresses = [
rdf_client.NetworkAddress(human_readable=a)
for a in ("1.1.1.1", "2.2.2.2", "3.3.3.3")
]
eth0 = rdf_client.Interface(ifname="eth0", addresses=addresses[:2])
ppp0 = rdf_client.Interface(ifname="ppp0", addresses=addresses[2])
rdf.interfaces = [eth0, ppp0]
template = ("{system_info.system} {users.username} {interfaces.ifname} "
"{interfaces.addresses.human_readable}\n")
hinter = hints.Hinter(template=template)
expected = "Linux root,jconnor eth0,ppp0 1.1.1.1,2.2.2.2,3.3.3.3"
result = hinter.Render(rdf)
self.assertEqual(expected, result)
def testRdfFormatterHandlesKeyValuePair(self):
"""rdfvalue.KeyValue items need special handling to expand k and v."""
key = rdf_protodict.DataBlob().SetValue("skynet")
value = rdf_protodict.DataBlob().SetValue([1997])
rdf = rdf_protodict.KeyValue(k=key, v=value)
template = "{k}: {v}"
hinter = hints.Hinter(template=template)
expected = "skynet: 1997"
result = hinter.Render(rdf)
self.assertEqual(expected, result)
def testRdfFormatterAttributedDict(self):
sshd = rdf_config_file.SshdConfig()
sshd.config = rdf_protodict.AttributedDict(skynet="operational")
template = "{config.skynet}"
hinter = hints.Hinter(template=template)
expected = "operational"
result = hinter.Render(sshd)
self.assertEqual(expected, result)
def testRdfFormatterFanOut(self):
rdf = rdf_protodict.Dict()
user1 = rdf_client.User(username="drexler")
user2 = rdf_client.User(username="joy")
rdf["cataclysm"] = "GreyGoo"
rdf["thinkers"] = [user1, user2]
rdf["reference"] = {
"ecophage": ["bots", ["nanobots", ["picobots"]]],
"doomsday": {
"books": ["cats cradle", "prey"]
}
}
template = ("{cataclysm}; {thinkers.username}; {reference.ecophage}; "
"{reference.doomsday}\n")
hinter = hints.Hinter(template=template)
expected = ("GreyGoo; drexler,joy; bots,nanobots,picobots; "
"books:cats cradle,prey")
result = hinter.Render(rdf)
self.assertEqual(expected, result)
def testStatModeFormat(self):
rdf = rdf_client.StatEntry(st_mode=33204)
expected = "-rw-rw-r--"
template = "{st_mode}"
hinter = hints.Hinter(template=template)
result = hinter.Render(rdf)
self.assertEqual(expected, result)
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)<|fim▁end|>
|
"""Test hint operations."""
def testCheckOverlay(self):
|
<|file_name|>ArrayFilterIterator.java<|end_file_name|><|fim▁begin|>/*
* The MIT License
*
* Copyright 2016 njacinto.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/<|fim▁hole|>
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.Predicate;
import org.nfpj.utils.predicates.TruePredicate;
/**
*
* @author njacinto
* @param <T> the type of object being returned by this iterator
*/
public class ArrayFilterIterator<T> implements Iterator<T> {
protected static final int END_OF_ITERATION = -2;
//
private int nextIndex;
//
protected final T[] array;
protected final Predicate<T> predicate;
// <editor-fold defaultstate="expanded" desc="Constructors">
/**
* Creates an instance of this class
*
     * @param array the array from which this instance will extract the elements
* @param predicate the filter to be applied to the elements
*/
public ArrayFilterIterator(T[] array, Predicate<T> predicate) {
this(array, predicate, -1);
}
    /**
     * Creates an instance of this class, starting the search after the
     * given index.
     *
     * @param array the array from which this instance will extract the elements
     * @param predicate the filter to be applied to the elements
     * @param prevIndex the index after which the search for the first
     * matching element starts (-1 to search from the beginning)
     */
protected ArrayFilterIterator(T[] array, Predicate<T> predicate, int prevIndex) {
this.array = array!=null ? array : ArrayUtil.empty();
this.predicate = predicate!=null ? predicate : TruePredicate.getInstance();
this.nextIndex = getNextIndex(prevIndex);
}
// </editor-fold>
// <editor-fold defaultstate="expanded" desc="Public methods">
/**
* {@inheritDoc}
*/
@Override
public boolean hasNext() {
return nextIndex != END_OF_ITERATION;
}
/**
* {@inheritDoc}
*/
@Override
public T next() {
if(nextIndex==END_OF_ITERATION){
            throw new NoSuchElementException("The underlying collection has no more elements.");
}
int index = nextIndex;
nextIndex = getNextIndex(nextIndex);
return array[index];
}
/**
* {@inheritDoc}
*/
@Override
public void remove() {
throw new UnsupportedOperationException("The iterator doesn't allow changes.");
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Protected methods">
/**
     * Searches for the index of the next element that matches the filtering
     * conditions.
     *
     * @param currIndex the index after which the search starts
     * @return the index of the next matching element, or END_OF_ITERATION
     * if no more elements are available
*/
protected int getNextIndex(int currIndex){
if(currIndex!=END_OF_ITERATION){
for(int i=currIndex+1; i<array.length; i++){
if(predicate.test(array[i])){
return i;
}
}
}
return END_OF_ITERATION;
}
// </editor-fold>
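    // Illustrative usage, a hedged sketch; the array and lambda below are
    // hypothetical, not part of this class:
    //
    //   String[] data = {"alpha", "beta", "alphabet"};
    //   Iterator<String> it =
    //       new ArrayFilterIterator<>(data, s -> s.startsWith("alpha"));
    //   while (it.hasNext()) {
    //       System.out.println(it.next()); // prints "alpha", then "alphabet"
    //   }
    //
    // Matching is lazy: getNextIndex only scans far enough to locate the next
    // matching element, so construction costs O(position of first match).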
}<|fim▁end|>
|
package org.nfpj.utils.arrays;
|
<|file_name|>test_action_defaults.py<|end_file_name|><|fim▁begin|># Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
import requests
from mistral.actions import std_actions
from mistral.db.v2 import api as db_api
from mistral.services import workflows as wf_service
from mistral.tests.unit import base as test_base
from mistral.tests.unit.engine import base
from mistral.workflow import states
# Use the set_default method to set value otherwise in certain test cases
# the change in value is not permanent.
cfg.CONF.set_default('auth_enable', False, group='pecan')
ENV = {
'__actions': {
'std.http': {
'auth': 'librarian:password123',
'timeout': 30,
}
}
}
EXPECTED_ENV_AUTH = ('librarian', 'password123')
WORKFLOW1 = """
---
version: "2.0"
wf1:
type: direct
tasks:
task1:
action: std.http url="https://api.library.org/books"
publish:
result: <% $ %>
"""
WORKFLOW2 = """
---
version: "2.0"
wf2:
type: direct
tasks:
task1:
action: std.http url="https://api.library.org/books" timeout=60
publish:
result: <% $ %>
"""
WORKFLOW1_WITH_ITEMS = """<|fim▁hole|>wf1_with_items:
type: direct
input:
- links
tasks:
task1:
with-items: link in <% $.links %>
action: std.http url=<% $.link %>
publish:
result: <% $ %>
"""
WORKFLOW2_WITH_ITEMS = """
---
version: "2.0"
wf2_with_items:
type: direct
input:
- links
tasks:
task1:
with-items: link in <% $.links %>
action: std.http url=<% $.link %> timeout=60
publish:
result: <% $ %>
"""
class ActionDefaultTest(base.EngineTestCase):
@mock.patch.object(
requests, 'request',
mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK')))
@mock.patch.object(
std_actions.HTTPAction, 'is_sync',
mock.MagicMock(return_value=True))
def test_action_defaults_from_env(self):
wf_service.create_workflows(WORKFLOW1)
wf_ex = self.engine.start_workflow('wf1', env=ENV)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
self.assertEqual(states.SUCCESS, wf_ex.state)
self._assert_single_item(wf_ex.task_executions, name='task1')
requests.request.assert_called_with(
'GET', 'https://api.library.org/books',
params=None, data=None, headers=None, cookies=None,
allow_redirects=None, proxies=None, verify=None,
auth=EXPECTED_ENV_AUTH,
timeout=ENV['__actions']['std.http']['timeout'])
@mock.patch.object(
requests, 'request',
mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK')))
@mock.patch.object(
std_actions.HTTPAction, 'is_sync',
mock.MagicMock(return_value=True))
def test_action_defaults_from_env_not_applied(self):
wf_service.create_workflows(WORKFLOW2)
wf_ex = self.engine.start_workflow('wf2', env=ENV)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
self.assertEqual(states.SUCCESS, wf_ex.state)
self._assert_single_item(wf_ex.task_executions, name='task1')
requests.request.assert_called_with(
'GET', 'https://api.library.org/books',
params=None, data=None, headers=None, cookies=None,
allow_redirects=None, proxies=None, verify=None,
auth=EXPECTED_ENV_AUTH,
timeout=60
)
@mock.patch.object(
requests, 'request',
mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK')))
@mock.patch.object(
std_actions.HTTPAction, 'is_sync',
mock.MagicMock(return_value=True))
def test_with_items_action_defaults_from_env(self):
wf_service.create_workflows(WORKFLOW1_WITH_ITEMS)
wf_input = {
'links': [
'https://api.library.org/books',
'https://api.library.org/authors'
]
}
wf_ex = self.engine.start_workflow(
'wf1_with_items',
wf_input=wf_input,
env=ENV
)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
self.assertEqual(states.SUCCESS, wf_ex.state)
self._assert_single_item(wf_ex.task_executions, name='task1')
calls = [mock.call('GET', url, params=None, data=None,
headers=None, cookies=None,
allow_redirects=None, proxies=None,
auth=EXPECTED_ENV_AUTH, verify=None,
timeout=ENV['__actions']['std.http']['timeout'])
for url in wf_input['links']]
requests.request.assert_has_calls(calls, any_order=True)
@mock.patch.object(
requests, 'request',
mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK')))
@mock.patch.object(
std_actions.HTTPAction, 'is_sync',
mock.MagicMock(return_value=True))
def test_with_items_action_defaults_from_env_not_applied(self):
wf_service.create_workflows(WORKFLOW2_WITH_ITEMS)
wf_input = {
'links': [
'https://api.library.org/books',
'https://api.library.org/authors'
]
}
wf_ex = self.engine.start_workflow(
'wf2_with_items',
wf_input=wf_input,
env=ENV
)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
self.assertEqual(states.SUCCESS, wf_ex.state)
self._assert_single_item(wf_ex.task_executions, name='task1')
calls = [mock.call('GET', url, params=None, data=None,
headers=None, cookies=None,
allow_redirects=None, proxies=None,
auth=EXPECTED_ENV_AUTH, verify=None,
timeout=60)
for url in wf_input['links']]
requests.request.assert_has_calls(calls, any_order=True)<|fim▁end|>
|
---
version: "2.0"
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/// Debug
#[macro_use]
pub mod debug;
/// ELF File Support
pub mod elf;
/// Event input
pub mod event;
/// Get slice implementation
pub mod get_slice;
/// Kernel memory allocation
pub mod memory;
/// Paging (x86)
#[cfg(target_arch = "x86")]
#[path="paging-i386.rs"]
pub mod paging;
/// Paging (x86_64)
#[cfg(target_arch = "x86_64")]
#[path="paging-x86_64.rs"]
pub mod paging;
/// A module for parsing paths
pub mod parse_path;
/// A module for parsing IP related strings<|fim▁hole|>pub mod random;
/// A module for time
pub mod time;
/// String to number
pub mod to_num;<|fim▁end|>
|
pub mod parse_ip;
/// A module for queues
pub mod queue;
/// A module for pseudorandom generator
|
<|file_name|>util.js<|end_file_name|><|fim▁begin|>/* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set shiftwidth=2 tabstop=2 autoindent cindent expandtab: */
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* globals Cmd, ColorSpace, Dict, MozBlobBuilder, Name, PDFJS, Ref, URL,
Promise */
'use strict';
var globalScope = (typeof window === 'undefined') ? this : window;
var isWorker = (typeof window === 'undefined');
var FONT_IDENTITY_MATRIX = [0.001, 0, 0, 0.001, 0, 0];
var TextRenderingMode = {
FILL: 0,
STROKE: 1,
FILL_STROKE: 2,
INVISIBLE: 3,
FILL_ADD_TO_PATH: 4,
STROKE_ADD_TO_PATH: 5,
FILL_STROKE_ADD_TO_PATH: 6,
ADD_TO_PATH: 7,
FILL_STROKE_MASK: 3,
ADD_TO_PATH_FLAG: 4
};
var ImageKind = {
GRAYSCALE_1BPP: 1,
RGB_24BPP: 2,
RGBA_32BPP: 3
};
var AnnotationType = {
WIDGET: 1,
TEXT: 2,
LINK: 3
};
var StreamType = {
UNKNOWN: 0,
FLATE: 1,
LZW: 2,
DCT: 3,
JPX: 4,
JBIG: 5,
A85: 6,
AHX: 7,
CCF: 8,
RL: 9
};
var FontType = {
UNKNOWN: 0,
TYPE1: 1,
TYPE1C: 2,
CIDFONTTYPE0: 3,
CIDFONTTYPE0C: 4,
TRUETYPE: 5,
CIDFONTTYPE2: 6,
TYPE3: 7,
OPENTYPE: 8,
TYPE0: 9,
MMTYPE1: 10
};
// The global PDFJS object exposes the API
// In production, it will be declared outside a global wrapper
// In development, it will be declared here
if (!globalScope.PDFJS) {
globalScope.PDFJS = {};
}
globalScope.PDFJS.pdfBug = false;
PDFJS.VERBOSITY_LEVELS = {
errors: 0,
warnings: 1,
infos: 5
};
// All the possible operations for an operator list.
var OPS = PDFJS.OPS = {
// Intentionally start from 1 so it is easy to spot bad operators that will be
// 0's.
dependency: 1,
setLineWidth: 2,
setLineCap: 3,
setLineJoin: 4,
setMiterLimit: 5,
setDash: 6,
setRenderingIntent: 7,
setFlatness: 8,
setGState: 9,
save: 10,
restore: 11,
transform: 12,
moveTo: 13,<|fim▁hole|> closePath: 18,
rectangle: 19,
stroke: 20,
closeStroke: 21,
fill: 22,
eoFill: 23,
fillStroke: 24,
eoFillStroke: 25,
closeFillStroke: 26,
closeEOFillStroke: 27,
endPath: 28,
clip: 29,
eoClip: 30,
beginText: 31,
endText: 32,
setCharSpacing: 33,
setWordSpacing: 34,
setHScale: 35,
setLeading: 36,
setFont: 37,
setTextRenderingMode: 38,
setTextRise: 39,
moveText: 40,
setLeadingMoveText: 41,
setTextMatrix: 42,
nextLine: 43,
showText: 44,
showSpacedText: 45,
nextLineShowText: 46,
nextLineSetSpacingShowText: 47,
setCharWidth: 48,
setCharWidthAndBounds: 49,
setStrokeColorSpace: 50,
setFillColorSpace: 51,
setStrokeColor: 52,
setStrokeColorN: 53,
setFillColor: 54,
setFillColorN: 55,
setStrokeGray: 56,
setFillGray: 57,
setStrokeRGBColor: 58,
setFillRGBColor: 59,
setStrokeCMYKColor: 60,
setFillCMYKColor: 61,
shadingFill: 62,
beginInlineImage: 63,
beginImageData: 64,
endInlineImage: 65,
paintXObject: 66,
markPoint: 67,
markPointProps: 68,
beginMarkedContent: 69,
beginMarkedContentProps: 70,
endMarkedContent: 71,
beginCompat: 72,
endCompat: 73,
paintFormXObjectBegin: 74,
paintFormXObjectEnd: 75,
beginGroup: 76,
endGroup: 77,
beginAnnotations: 78,
endAnnotations: 79,
beginAnnotation: 80,
endAnnotation: 81,
paintJpegXObject: 82,
paintImageMaskXObject: 83,
paintImageMaskXObjectGroup: 84,
paintImageXObject: 85,
paintInlineImageXObject: 86,
paintInlineImageXObjectGroup: 87,
paintImageXObjectRepeat: 88,
paintImageMaskXObjectRepeat: 89,
paintSolidColorImageMask: 90,
constructPath: 91
};
// A notice for devs. These are good for things that are helpful to devs, such
// as warning that Workers were disabled, which is important to devs but not
// end users.
function info(msg) {
if (PDFJS.verbosity >= PDFJS.VERBOSITY_LEVELS.infos) {
console.log('Info: ' + msg);
}
}
// Non-fatal warnings.
function warn(msg) {
if (PDFJS.verbosity >= PDFJS.VERBOSITY_LEVELS.warnings) {
console.log('Warning: ' + msg);
}
}
// Fatal errors that should trigger the fallback UI and halt execution by
// throwing an exception.
function error(msg) {
// If multiple arguments were passed, pass them all to the log function.
if (arguments.length > 1) {
var logArguments = ['Error:'];
logArguments.push.apply(logArguments, arguments);
console.log.apply(console, logArguments);
// Join the arguments into a single string for the lines below.
msg = [].join.call(arguments, ' ');
} else {
console.log('Error: ' + msg);
}
console.log(backtrace());
UnsupportedManager.notify(UNSUPPORTED_FEATURES.unknown);
throw new Error(msg);
}
function backtrace() {
try {
throw new Error();
} catch (e) {
return e.stack ? e.stack.split('\n').slice(2).join('\n') : '';
}
}
function assert(cond, msg) {
if (!cond) {
error(msg);
}
}
var UNSUPPORTED_FEATURES = PDFJS.UNSUPPORTED_FEATURES = {
unknown: 'unknown',
forms: 'forms',
javaScript: 'javaScript',
smask: 'smask',
shadingPattern: 'shadingPattern',
font: 'font'
};
var UnsupportedManager = PDFJS.UnsupportedManager =
(function UnsupportedManagerClosure() {
var listeners = [];
return {
listen: function (cb) {
listeners.push(cb);
},
notify: function (featureId) {
warn('Unsupported feature "' + featureId + '"');
for (var i = 0, ii = listeners.length; i < ii; i++) {
listeners[i](featureId);
}
}
};
})();
// Combines two URLs. The baseUrl must be an absolute URL. If the url is
// already an absolute URL, it is returned as-is.
function combineUrl(baseUrl, url) {
if (!url) {
return baseUrl;
}
if (/^[a-z][a-z0-9+\-.]*:/i.test(url)) {
return url;
}
var i;
if (url.charAt(0) === '/') {
// absolute path
i = baseUrl.indexOf('://');
if (url.charAt(1) === '/') {
++i;
} else {
i = baseUrl.indexOf('/', i + 3);
}
return baseUrl.substring(0, i) + url;
} else {
// relative path
var pathLength = baseUrl.length;
i = baseUrl.lastIndexOf('#');
pathLength = i >= 0 ? i : pathLength;
i = baseUrl.lastIndexOf('?', pathLength);
pathLength = i >= 0 ? i : pathLength;
var prefixLength = baseUrl.lastIndexOf('/', pathLength);
return baseUrl.substring(0, prefixLength + 1) + url;
}
}
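// Illustrative behaviour of combineUrl, a hedged sketch; the URLs below are
// hypothetical examples, not values used elsewhere in this file:
//   combineUrl('http://a.com/x/y', 'b')            -> 'http://a.com/x/b'
//   combineUrl('http://a.com/x/y', '/z')           -> 'http://a.com/z'
//   combineUrl('http://a.com/x/y', '//cdn.com/z')  -> 'http://cdn.com/z'
//   combineUrl('http://a.com/x/y', 'ftp://f.com')  -> 'ftp://f.com'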
// Validates if URL is safe and allowed, e.g. to avoid XSS.
function isValidUrl(url, allowRelative) {
if (!url) {
return false;
}
// RFC 3986 (http://tools.ietf.org/html/rfc3986#section-3.1)
// scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
var protocol = /^[a-z][a-z0-9+\-.]*(?=:)/i.exec(url);
if (!protocol) {
return allowRelative;
}
protocol = protocol[0].toLowerCase();
switch (protocol) {
case 'http':
case 'https':
case 'ftp':
case 'mailto':
return true;
default:
return false;
}
}
PDFJS.isValidUrl = isValidUrl;
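// Hedged examples of the whitelist above (illustrative values only):
//   isValidUrl('https://example.com', false)  -> true
//   isValidUrl('javascript:alert(1)', false)  -> false (scheme not allowed)
//   isValidUrl('page2.html', true)            -> true  (relative permitted)
//   isValidUrl('page2.html', false)           -> false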
function shadow(obj, prop, value) {
Object.defineProperty(obj, prop, { value: value,
enumerable: true,
configurable: true,
writable: false });
return value;
}
var PasswordResponses = PDFJS.PasswordResponses = {
NEED_PASSWORD: 1,
INCORRECT_PASSWORD: 2
};
var PasswordException = (function PasswordExceptionClosure() {
function PasswordException(msg, code) {
this.name = 'PasswordException';
this.message = msg;
this.code = code;
}
PasswordException.prototype = new Error();
PasswordException.constructor = PasswordException;
return PasswordException;
})();
var UnknownErrorException = (function UnknownErrorExceptionClosure() {
function UnknownErrorException(msg, details) {
this.name = 'UnknownErrorException';
this.message = msg;
this.details = details;
}
UnknownErrorException.prototype = new Error();
UnknownErrorException.constructor = UnknownErrorException;
return UnknownErrorException;
})();
var InvalidPDFException = (function InvalidPDFExceptionClosure() {
function InvalidPDFException(msg) {
this.name = 'InvalidPDFException';
this.message = msg;
}
InvalidPDFException.prototype = new Error();
InvalidPDFException.constructor = InvalidPDFException;
return InvalidPDFException;
})();
var MissingPDFException = (function MissingPDFExceptionClosure() {
function MissingPDFException(msg) {
this.name = 'MissingPDFException';
this.message = msg;
}
MissingPDFException.prototype = new Error();
MissingPDFException.constructor = MissingPDFException;
return MissingPDFException;
})();
var NotImplementedException = (function NotImplementedExceptionClosure() {
function NotImplementedException(msg) {
this.message = msg;
}
NotImplementedException.prototype = new Error();
NotImplementedException.prototype.name = 'NotImplementedException';
NotImplementedException.constructor = NotImplementedException;
return NotImplementedException;
})();
var MissingDataException = (function MissingDataExceptionClosure() {
function MissingDataException(begin, end) {
this.begin = begin;
this.end = end;
this.message = 'Missing data [' + begin + ', ' + end + ')';
}
MissingDataException.prototype = new Error();
MissingDataException.prototype.name = 'MissingDataException';
MissingDataException.constructor = MissingDataException;
return MissingDataException;
})();
var XRefParseException = (function XRefParseExceptionClosure() {
function XRefParseException(msg) {
this.message = msg;
}
XRefParseException.prototype = new Error();
XRefParseException.prototype.name = 'XRefParseException';
XRefParseException.constructor = XRefParseException;
return XRefParseException;
})();
function bytesToString(bytes) {
var length = bytes.length;
var MAX_ARGUMENT_COUNT = 8192;
if (length < MAX_ARGUMENT_COUNT) {
return String.fromCharCode.apply(null, bytes);
}
var strBuf = [];
for (var i = 0; i < length; i += MAX_ARGUMENT_COUNT) {
var chunkEnd = Math.min(i + MAX_ARGUMENT_COUNT, length);
var chunk = bytes.subarray(i, chunkEnd);
strBuf.push(String.fromCharCode.apply(null, chunk));
}
return strBuf.join('');
}
function stringToBytes(str) {
var length = str.length;
var bytes = new Uint8Array(length);
for (var i = 0; i < length; ++i) {
bytes[i] = str.charCodeAt(i) & 0xFF;
}
return bytes;
}
function string32(value) {
return String.fromCharCode((value >> 24) & 0xff, (value >> 16) & 0xff,
(value >> 8) & 0xff, value & 0xff);
}
function log2(x) {
var n = 1, i = 0;
while (x > n) {
n <<= 1;
i++;
}
return i;
}
function readInt8(data, start) {
return (data[start] << 24) >> 24;
}
function readUint16(data, offset) {
return (data[offset] << 8) | data[offset + 1];
}
function readUint32(data, offset) {
return ((data[offset] << 24) | (data[offset + 1] << 16) |
(data[offset + 2] << 8) | data[offset + 3]) >>> 0;
}
// Lazy test the endianness of the platform
// NOTE: This will be 'true' for simulated TypedArrays
function isLittleEndian() {
var buffer8 = new Uint8Array(2);
buffer8[0] = 1;
var buffer16 = new Uint16Array(buffer8.buffer);
return (buffer16[0] === 1);
}
Object.defineProperty(PDFJS, 'isLittleEndian', {
configurable: true,
get: function PDFJS_isLittleEndian() {
return shadow(PDFJS, 'isLittleEndian', isLittleEndian());
}
});
//#if !(FIREFOX || MOZCENTRAL || B2G || CHROME)
//// Lazy test if the user agent supports CanvasTypedArrays
function hasCanvasTypedArrays() {
var canvas = document.createElement('canvas');
canvas.width = canvas.height = 1;
var ctx = canvas.getContext('2d');
var imageData = ctx.createImageData(1, 1);
return (typeof imageData.data.buffer !== 'undefined');
}
Object.defineProperty(PDFJS, 'hasCanvasTypedArrays', {
configurable: true,
get: function PDFJS_hasCanvasTypedArrays() {
return shadow(PDFJS, 'hasCanvasTypedArrays', hasCanvasTypedArrays());
}
});
var Uint32ArrayView = (function Uint32ArrayViewClosure() {
function Uint32ArrayView(buffer, length) {
this.buffer = buffer;
this.byteLength = buffer.length;
this.length = length === undefined ? (this.byteLength >> 2) : length;
ensureUint32ArrayViewProps(this.length);
}
Uint32ArrayView.prototype = Object.create(null);
var uint32ArrayViewSetters = 0;
function createUint32ArrayProp(index) {
return {
get: function () {
var buffer = this.buffer, offset = index << 2;
return (buffer[offset] | (buffer[offset + 1] << 8) |
(buffer[offset + 2] << 16) | (buffer[offset + 3] << 24)) >>> 0;
},
set: function (value) {
var buffer = this.buffer, offset = index << 2;
buffer[offset] = value & 255;
buffer[offset + 1] = (value >> 8) & 255;
buffer[offset + 2] = (value >> 16) & 255;
buffer[offset + 3] = (value >>> 24) & 255;
}
};
}
function ensureUint32ArrayViewProps(length) {
while (uint32ArrayViewSetters < length) {
Object.defineProperty(Uint32ArrayView.prototype,
uint32ArrayViewSetters,
createUint32ArrayProp(uint32ArrayViewSetters));
uint32ArrayViewSetters++;
}
}
return Uint32ArrayView;
})();
//#else
//PDFJS.hasCanvasTypedArrays = true;
//#endif
var IDENTITY_MATRIX = [1, 0, 0, 1, 0, 0];
var Util = PDFJS.Util = (function UtilClosure() {
function Util() {}
var rgbBuf = ['rgb(', 0, ',', 0, ',', 0, ')'];
// makeCssRgb() can be called thousands of times. Using |rgbBuf| avoids
// creating many intermediate strings.
Util.makeCssRgb = function Util_makeCssRgb(rgb) {
rgbBuf[1] = rgb[0];
rgbBuf[3] = rgb[1];
rgbBuf[5] = rgb[2];
return rgbBuf.join('');
};
// Concatenates two transformation matrices together and returns the result.
Util.transform = function Util_transform(m1, m2) {
return [
m1[0] * m2[0] + m1[2] * m2[1],
m1[1] * m2[0] + m1[3] * m2[1],
m1[0] * m2[2] + m1[2] * m2[3],
m1[1] * m2[2] + m1[3] * m2[3],
m1[0] * m2[4] + m1[2] * m2[5] + m1[4],
m1[1] * m2[4] + m1[3] * m2[5] + m1[5]
];
};
// For 2d affine transforms
Util.applyTransform = function Util_applyTransform(p, m) {
var xt = p[0] * m[0] + p[1] * m[2] + m[4];
var yt = p[0] * m[1] + p[1] * m[3] + m[5];
return [xt, yt];
};
Util.applyInverseTransform = function Util_applyInverseTransform(p, m) {
var d = m[0] * m[3] - m[1] * m[2];
var xt = (p[0] * m[3] - p[1] * m[2] + m[2] * m[5] - m[4] * m[3]) / d;
var yt = (-p[0] * m[1] + p[1] * m[0] + m[4] * m[1] - m[5] * m[0]) / d;
return [xt, yt];
};
// Applies the transform to the rectangle and finds the minimum axially
// aligned bounding box.
Util.getAxialAlignedBoundingBox =
function Util_getAxialAlignedBoundingBox(r, m) {
var p1 = Util.applyTransform(r, m);
var p2 = Util.applyTransform(r.slice(2, 4), m);
var p3 = Util.applyTransform([r[0], r[3]], m);
var p4 = Util.applyTransform([r[2], r[1]], m);
return [
Math.min(p1[0], p2[0], p3[0], p4[0]),
Math.min(p1[1], p2[1], p3[1], p4[1]),
Math.max(p1[0], p2[0], p3[0], p4[0]),
Math.max(p1[1], p2[1], p3[1], p4[1])
];
};
Util.inverseTransform = function Util_inverseTransform(m) {
var d = m[0] * m[3] - m[1] * m[2];
return [m[3] / d, -m[1] / d, -m[2] / d, m[0] / d,
(m[2] * m[5] - m[4] * m[3]) / d, (m[4] * m[1] - m[5] * m[0]) / d];
};
// Apply a generic 3d matrix M on a 3-vector v:
  // | a b c |   | X |
  // | d e f | x | Y |
  // | g h i |   | Z |
// M is assumed to be serialized as [a,b,c,d,e,f,g,h,i],
// with v as [X,Y,Z]
Util.apply3dTransform = function Util_apply3dTransform(m, v) {
return [
m[0] * v[0] + m[1] * v[1] + m[2] * v[2],
m[3] * v[0] + m[4] * v[1] + m[5] * v[2],
m[6] * v[0] + m[7] * v[1] + m[8] * v[2]
];
};
// This calculation uses Singular Value Decomposition.
// The SVD can be represented with formula A = USV. We are interested in the
// matrix S here because it represents the scale values.
Util.singularValueDecompose2dScale =
function Util_singularValueDecompose2dScale(m) {
var transpose = [m[0], m[2], m[1], m[3]];
// Multiply matrix m with its transpose.
var a = m[0] * transpose[0] + m[1] * transpose[2];
var b = m[0] * transpose[1] + m[1] * transpose[3];
var c = m[2] * transpose[0] + m[3] * transpose[2];
var d = m[2] * transpose[1] + m[3] * transpose[3];
// Solve the second degree polynomial to get roots.
var first = (a + d) / 2;
var second = Math.sqrt((a + d) * (a + d) - 4 * (a * d - c * b)) / 2;
var sx = first + second || 1;
var sy = first - second || 1;
// Scale values are the square roots of the eigenvalues.
return [Math.sqrt(sx), Math.sqrt(sy)];
};
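  // Worked example (a sketch): for the pure scaling matrix
  // m = [3, 0, 0, 2, 0, 0] we get a = 9, b = c = 0 and d = 4, so
  // first = 6.5, second = 2.5, and the returned scales are
  // [Math.sqrt(9), Math.sqrt(4)] = [3, 2], as expected.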
// Normalize rectangle rect=[x1, y1, x2, y2] so that (x1,y1) < (x2,y2)
// For coordinate systems whose origin lies in the bottom-left, this
// means normalization to (BL,TR) ordering. For systems with origin in the
// top-left, this means (TL,BR) ordering.
Util.normalizeRect = function Util_normalizeRect(rect) {
var r = rect.slice(0); // clone rect
if (rect[0] > rect[2]) {
r[0] = rect[2];
r[2] = rect[0];
}
if (rect[1] > rect[3]) {
r[1] = rect[3];
r[3] = rect[1];
}
return r;
};
// Returns a rectangle [x1, y1, x2, y2] corresponding to the
// intersection of rect1 and rect2. If no intersection, returns 'false'
// The rectangle coordinates of rect1, rect2 should be [x1, y1, x2, y2]
Util.intersect = function Util_intersect(rect1, rect2) {
function compare(a, b) {
return a - b;
}
// Order points along the axes
var orderedX = [rect1[0], rect1[2], rect2[0], rect2[2]].sort(compare),
orderedY = [rect1[1], rect1[3], rect2[1], rect2[3]].sort(compare),
result = [];
rect1 = Util.normalizeRect(rect1);
rect2 = Util.normalizeRect(rect2);
// X: first and second points belong to different rectangles?
if ((orderedX[0] === rect1[0] && orderedX[1] === rect2[0]) ||
(orderedX[0] === rect2[0] && orderedX[1] === rect1[0])) {
// Intersection must be between second and third points
result[0] = orderedX[1];
result[2] = orderedX[2];
} else {
return false;
}
// Y: first and second points belong to different rectangles?
if ((orderedY[0] === rect1[1] && orderedY[1] === rect2[1]) ||
(orderedY[0] === rect2[1] && orderedY[1] === rect1[1])) {
// Intersection must be between second and third points
result[1] = orderedY[1];
result[3] = orderedY[2];
} else {
return false;
}
return result;
};
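  // Illustrative example (hypothetical rectangles): intersecting
  // [0, 0, 10, 10] with [5, 5, 15, 15] yields [5, 5, 10, 10], while the
  // disjoint pair [0, 0, 1, 1] and [2, 2, 3, 3] returns false.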
Util.sign = function Util_sign(num) {
return num < 0 ? -1 : 1;
};
Util.appendToArray = function Util_appendToArray(arr1, arr2) {
Array.prototype.push.apply(arr1, arr2);
};
Util.prependToArray = function Util_prependToArray(arr1, arr2) {
Array.prototype.unshift.apply(arr1, arr2);
};
Util.extendObj = function extendObj(obj1, obj2) {
for (var key in obj2) {
obj1[key] = obj2[key];
}
};
Util.getInheritableProperty = function Util_getInheritableProperty(dict,
name) {
while (dict && !dict.has(name)) {
dict = dict.get('Parent');
}
if (!dict) {
return null;
}
return dict.get(name);
};
Util.inherit = function Util_inherit(sub, base, prototype) {
sub.prototype = Object.create(base.prototype);
sub.prototype.constructor = sub;
for (var prop in prototype) {
sub.prototype[prop] = prototype[prop];
}
};
Util.loadScript = function Util_loadScript(src, callback) {
var script = document.createElement('script');
var loaded = false;
script.setAttribute('src', src);
if (callback) {
script.onload = function() {
if (!loaded) {
callback();
}
loaded = true;
};
}
document.getElementsByTagName('head')[0].appendChild(script);
};
return Util;
})();
/**
* PDF page viewport created based on scale, rotation and offset.
* @class
* @alias PDFJS.PageViewport
*/
var PageViewport = PDFJS.PageViewport = (function PageViewportClosure() {
/**
* @constructor
* @private
* @param viewBox {Array} xMin, yMin, xMax and yMax coordinates.
* @param scale {number} scale of the viewport.
* @param rotation {number} rotations of the viewport in degrees.
* @param offsetX {number} offset X
* @param offsetY {number} offset Y
* @param dontFlip {boolean} if true, axis Y will not be flipped.
*/
function PageViewport(viewBox, scale, rotation, offsetX, offsetY, dontFlip) {
this.viewBox = viewBox;
this.scale = scale;
this.rotation = rotation;
this.offsetX = offsetX;
this.offsetY = offsetY;
    // creating transform to convert the PDF coordinate system to normal
    // canvas-like coordinates, taking scale and rotation into account
var centerX = (viewBox[2] + viewBox[0]) / 2;
var centerY = (viewBox[3] + viewBox[1]) / 2;
var rotateA, rotateB, rotateC, rotateD;
rotation = rotation % 360;
rotation = rotation < 0 ? rotation + 360 : rotation;
switch (rotation) {
case 180:
rotateA = -1; rotateB = 0; rotateC = 0; rotateD = 1;
break;
case 90:
rotateA = 0; rotateB = 1; rotateC = 1; rotateD = 0;
break;
case 270:
rotateA = 0; rotateB = -1; rotateC = -1; rotateD = 0;
break;
//case 0:
default:
rotateA = 1; rotateB = 0; rotateC = 0; rotateD = -1;
break;
}
if (dontFlip) {
rotateC = -rotateC; rotateD = -rotateD;
}
var offsetCanvasX, offsetCanvasY;
var width, height;
if (rotateA === 0) {
offsetCanvasX = Math.abs(centerY - viewBox[1]) * scale + offsetX;
offsetCanvasY = Math.abs(centerX - viewBox[0]) * scale + offsetY;
width = Math.abs(viewBox[3] - viewBox[1]) * scale;
height = Math.abs(viewBox[2] - viewBox[0]) * scale;
} else {
offsetCanvasX = Math.abs(centerX - viewBox[0]) * scale + offsetX;
offsetCanvasY = Math.abs(centerY - viewBox[1]) * scale + offsetY;
width = Math.abs(viewBox[2] - viewBox[0]) * scale;
height = Math.abs(viewBox[3] - viewBox[1]) * scale;
}
// creating transform for the following operations:
// translate(-centerX, -centerY), rotate and flip vertically,
// scale, and translate(offsetCanvasX, offsetCanvasY)
this.transform = [
rotateA * scale,
rotateB * scale,
rotateC * scale,
rotateD * scale,
offsetCanvasX - rotateA * scale * centerX - rotateC * scale * centerY,
offsetCanvasY - rotateB * scale * centerX - rotateD * scale * centerY
];
this.width = width;
this.height = height;
this.fontScale = scale;
}
PageViewport.prototype = /** @lends PDFJS.PageViewport.prototype */ {
/**
* Clones viewport with additional properties.
* @param args {Object} (optional) If specified, may contain the 'scale' or
* 'rotation' properties to override the corresponding properties in
* the cloned viewport.
* @returns {PDFJS.PageViewport} Cloned viewport.
*/
clone: function PageViewPort_clone(args) {
args = args || {};
var scale = 'scale' in args ? args.scale : this.scale;
var rotation = 'rotation' in args ? args.rotation : this.rotation;
return new PageViewport(this.viewBox.slice(), scale, rotation,
this.offsetX, this.offsetY, args.dontFlip);
},
/**
     * Converts a PDF point to viewport coordinates. For example, this is
     * useful for converting a PDF location into canvas pixel coordinates.
* @param x {number} X coordinate.
* @param y {number} Y coordinate.
* @returns {Object} Object that contains 'x' and 'y' properties of the
* point in the viewport coordinate space.
* @see {@link convertToPdfPoint}
* @see {@link convertToViewportRectangle}
*/
convertToViewportPoint: function PageViewport_convertToViewportPoint(x, y) {
return Util.applyTransform([x, y], this.transform);
},
/**
* Converts PDF rectangle to the viewport coordinates.
* @param rect {Array} xMin, yMin, xMax and yMax coordinates.
* @returns {Array} Contains corresponding coordinates of the rectangle
* in the viewport coordinate space.
* @see {@link convertToViewportPoint}
*/
convertToViewportRectangle:
function PageViewport_convertToViewportRectangle(rect) {
var tl = Util.applyTransform([rect[0], rect[1]], this.transform);
var br = Util.applyTransform([rect[2], rect[3]], this.transform);
return [tl[0], tl[1], br[0], br[1]];
},
/**
     * Converts viewport coordinates to a PDF location. For example, this is
     * useful for converting a canvas pixel location into a PDF one.
* @param x {number} X coordinate.
* @param y {number} Y coordinate.
* @returns {Object} Object that contains 'x' and 'y' properties of the
* point in the PDF coordinate space.
* @see {@link convertToViewportPoint}
*/
convertToPdfPoint: function PageViewport_convertToPdfPoint(x, y) {
return Util.applyInverseTransform([x, y], this.transform);
}
};
return PageViewport;
})();
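// A hedged usage sketch; the viewBox below is a hypothetical US-Letter
// media box, not a value taken from this file:
//   var vp = new PageViewport([0, 0, 612, 792], 1.5, 0, 0, 0);
//   vp.width;                        // 918  (612 * 1.5)
//   vp.height;                       // 1188 (792 * 1.5)
//   vp.convertToViewportPoint(0, 0); // [0, 1188] -- the y axis is flipped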
var PDFStringTranslateTable = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0x2D8, 0x2C7, 0x2C6, 0x2D9, 0x2DD, 0x2DB, 0x2DA, 0x2DC, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x2022, 0x2020, 0x2021, 0x2026, 0x2014,
0x2013, 0x192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C,
0x201D, 0x2018, 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x141, 0x152, 0x160,
0x178, 0x17D, 0x131, 0x142, 0x153, 0x161, 0x17E, 0, 0x20AC
];
function stringToPDFString(str) {
var i, n = str.length, strBuf = [];
if (str[0] === '\xFE' && str[1] === '\xFF') {
// UTF16BE BOM
for (i = 2; i < n; i += 2) {
strBuf.push(String.fromCharCode(
(str.charCodeAt(i) << 8) | str.charCodeAt(i + 1)));
}
} else {
for (i = 0; i < n; ++i) {
var code = PDFStringTranslateTable[str.charCodeAt(i)];
strBuf.push(code ? String.fromCharCode(code) : str.charAt(i));
}
}
return strBuf.join('');
}
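// Hedged examples: with a UTF16BE BOM, '\xFE\xFF\x00A\x00B' decodes to 'AB';
// without a BOM, single bytes are mapped through the translate table above,
// so '\x8D' becomes '\u201C' (left double quotation mark).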
function stringToUTF8String(str) {
return decodeURIComponent(escape(str));
}
function isEmptyObj(obj) {
for (var key in obj) {
return false;
}
return true;
}
function isBool(v) {
return typeof v === 'boolean';
}
function isInt(v) {
return typeof v === 'number' && ((v | 0) === v);
}
function isNum(v) {
return typeof v === 'number';
}
function isString(v) {
return typeof v === 'string';
}
function isNull(v) {
return v === null;
}
function isName(v) {
return v instanceof Name;
}
function isCmd(v, cmd) {
return v instanceof Cmd && (cmd === undefined || v.cmd === cmd);
}
function isDict(v, type) {
if (!(v instanceof Dict)) {
return false;
}
if (!type) {
return true;
}
var dictType = v.get('Type');
return isName(dictType) && dictType.name === type;
}
function isArray(v) {
return v instanceof Array;
}
function isStream(v) {
return typeof v === 'object' && v !== null && v.getBytes !== undefined;
}
function isArrayBuffer(v) {
return typeof v === 'object' && v !== null && v.byteLength !== undefined;
}
function isRef(v) {
return v instanceof Ref;
}
/**
* Promise Capability object.
*
* @typedef {Object} PromiseCapability
* @property {Promise} promise - A promise object.
* @property {function} resolve - Fullfills the promise.
* @property {function} reject - Rejects the promise.
*/
/**
* Creates a promise capability object.
* @alias PDFJS.createPromiseCapability
*
* @return {PromiseCapability} A capability object contains:
* - a Promise, resolve and reject methods.
*/
function createPromiseCapability() {
var capability = {};
capability.promise = new Promise(function (resolve, reject) {
capability.resolve = resolve;
capability.reject = reject;
});
return capability;
}
PDFJS.createPromiseCapability = createPromiseCapability;
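// Typical (hedged) usage of the capability pattern -- resolving a promise
// from outside its executor:
//   var cap = createPromiseCapability();
//   cap.promise.then(function (v) { console.log(v); });
//   cap.resolve(42); // logs 42 on a later tick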
/**
* Polyfill for Promises:
* The following promise implementation tries to generally implement the
 * Promise/A+ spec. Some notable differences from other promise libraries are:
 * - There currently isn't a separate deferred and promise object.
 * - Unhandled rejections eventually log an error.
*
* Based off of the work in:
* https://bugzilla.mozilla.org/show_bug.cgi?id=810490
*/
(function PromiseClosure() {
if (globalScope.Promise) {
// Promises existing in the DOM/Worker, checking presence of all/resolve
if (typeof globalScope.Promise.all !== 'function') {
globalScope.Promise.all = function (iterable) {
var count = 0, results = [], resolve, reject;
var promise = new globalScope.Promise(function (resolve_, reject_) {
resolve = resolve_;
reject = reject_;
});
iterable.forEach(function (p, i) {
count++;
p.then(function (result) {
results[i] = result;
count--;
if (count === 0) {
resolve(results);
}
}, reject);
});
if (count === 0) {
resolve(results);
}
return promise;
};
}
if (typeof globalScope.Promise.resolve !== 'function') {
globalScope.Promise.resolve = function (value) {
return new globalScope.Promise(function (resolve) { resolve(value); });
};
}
if (typeof globalScope.Promise.reject !== 'function') {
globalScope.Promise.reject = function (reason) {
return new globalScope.Promise(function (resolve, reject) {
reject(reason);
});
};
}
if (typeof globalScope.Promise.prototype.catch !== 'function') {
globalScope.Promise.prototype.catch = function (onReject) {
return globalScope.Promise.prototype.then(undefined, onReject);
};
}
return;
}
//#if !MOZCENTRAL
var STATUS_PENDING = 0;
var STATUS_RESOLVED = 1;
var STATUS_REJECTED = 2;
// In an attempt to avoid silent exceptions, unhandled rejections are
// tracked and if they aren't handled in a certain amount of time an
// error is logged.
var REJECTION_TIMEOUT = 500;
var HandlerManager = {
handlers: [],
running: false,
unhandledRejections: [],
pendingRejectionCheck: false,
scheduleHandlers: function scheduleHandlers(promise) {
if (promise._status === STATUS_PENDING) {
return;
}
this.handlers = this.handlers.concat(promise._handlers);
promise._handlers = [];
if (this.running) {
return;
}
this.running = true;
setTimeout(this.runHandlers.bind(this), 0);
},
runHandlers: function runHandlers() {
var RUN_TIMEOUT = 1; // ms
var timeoutAt = Date.now() + RUN_TIMEOUT;
while (this.handlers.length > 0) {
var handler = this.handlers.shift();
var nextStatus = handler.thisPromise._status;
var nextValue = handler.thisPromise._value;
try {
if (nextStatus === STATUS_RESOLVED) {
if (typeof handler.onResolve === 'function') {
nextValue = handler.onResolve(nextValue);
}
} else if (typeof handler.onReject === 'function') {
nextValue = handler.onReject(nextValue);
nextStatus = STATUS_RESOLVED;
if (handler.thisPromise._unhandledRejection) {
            this.removeUnhandledRejection(handler.thisPromise);
}
}
} catch (ex) {
nextStatus = STATUS_REJECTED;
nextValue = ex;
}
handler.nextPromise._updateStatus(nextStatus, nextValue);
if (Date.now() >= timeoutAt) {
break;
}
}
if (this.handlers.length > 0) {
setTimeout(this.runHandlers.bind(this), 0);
return;
}
this.running = false;
},
addUnhandledRejection: function addUnhandledRejection(promise) {
this.unhandledRejections.push({
promise: promise,
time: Date.now()
});
this.scheduleRejectionCheck();
},
    removeUnhandledRejection: function removeUnhandledRejection(promise) {
promise._unhandledRejection = false;
for (var i = 0; i < this.unhandledRejections.length; i++) {
if (this.unhandledRejections[i].promise === promise) {
          this.unhandledRejections.splice(i, 1);
i--;
}
}
},
scheduleRejectionCheck: function scheduleRejectionCheck() {
if (this.pendingRejectionCheck) {
return;
}
this.pendingRejectionCheck = true;
setTimeout(function rejectionCheck() {
this.pendingRejectionCheck = false;
var now = Date.now();
for (var i = 0; i < this.unhandledRejections.length; i++) {
if (now - this.unhandledRejections[i].time > REJECTION_TIMEOUT) {
var unhandled = this.unhandledRejections[i].promise._value;
var msg = 'Unhandled rejection: ' + unhandled;
if (unhandled.stack) {
msg += '\n' + unhandled.stack;
}
warn(msg);
            this.unhandledRejections.splice(i, 1);
i--;
}
}
if (this.unhandledRejections.length) {
this.scheduleRejectionCheck();
}
}.bind(this), REJECTION_TIMEOUT);
}
};
function Promise(resolver) {
this._status = STATUS_PENDING;
this._handlers = [];
try {
resolver.call(this, this._resolve.bind(this), this._reject.bind(this));
} catch (e) {
this._reject(e);
}
}
/**
* Builds a promise that is resolved when all the passed in promises are
* resolved.
   * @param {Array} promises Array of data and/or promises to wait for.
   * @return {Promise} New dependent promise.
*/
Promise.all = function Promise_all(promises) {
var resolveAll, rejectAll;
var deferred = new Promise(function (resolve, reject) {
resolveAll = resolve;
rejectAll = reject;
});
var unresolved = promises.length;
var results = [];
if (unresolved === 0) {
resolveAll(results);
return deferred;
}
function reject(reason) {
if (deferred._status === STATUS_REJECTED) {
return;
}
results = [];
rejectAll(reason);
}
for (var i = 0, ii = promises.length; i < ii; ++i) {
var promise = promises[i];
var resolve = (function(i) {
return function(value) {
if (deferred._status === STATUS_REJECTED) {
return;
}
results[i] = value;
unresolved--;
if (unresolved === 0) {
resolveAll(results);
}
};
})(i);
if (Promise.isPromise(promise)) {
promise.then(resolve, reject);
} else {
resolve(promise);
}
}
return deferred;
};
/**
* Checks if the value is likely a promise (has a 'then' function).
* @return {boolean} true if value is thenable
*/
Promise.isPromise = function Promise_isPromise(value) {
return value && typeof value.then === 'function';
};
/**
* Creates resolved promise
* @param value resolve value
* @returns {Promise}
*/
Promise.resolve = function Promise_resolve(value) {
return new Promise(function (resolve) { resolve(value); });
};
/**
* Creates rejected promise
* @param reason rejection value
* @returns {Promise}
*/
Promise.reject = function Promise_reject(reason) {
return new Promise(function (resolve, reject) { reject(reason); });
};
Promise.prototype = {
_status: null,
_value: null,
_handlers: null,
_unhandledRejection: null,
_updateStatus: function Promise__updateStatus(status, value) {
if (this._status === STATUS_RESOLVED ||
this._status === STATUS_REJECTED) {
return;
}
if (status === STATUS_RESOLVED &&
Promise.isPromise(value)) {
value.then(this._updateStatus.bind(this, STATUS_RESOLVED),
this._updateStatus.bind(this, STATUS_REJECTED));
return;
}
this._status = status;
this._value = value;
if (status === STATUS_REJECTED && this._handlers.length === 0) {
this._unhandledRejection = true;
HandlerManager.addUnhandledRejection(this);
}
HandlerManager.scheduleHandlers(this);
},
_resolve: function Promise_resolve(value) {
this._updateStatus(STATUS_RESOLVED, value);
},
_reject: function Promise_reject(reason) {
this._updateStatus(STATUS_REJECTED, reason);
},
then: function Promise_then(onResolve, onReject) {
var nextPromise = new Promise(function (resolve, reject) {
this.resolve = resolve;
this.reject = reject;
});
this._handlers.push({
thisPromise: this,
onResolve: onResolve,
onReject: onReject,
nextPromise: nextPromise
});
HandlerManager.scheduleHandlers(this);
return nextPromise;
},
catch: function Promise_catch(onReject) {
return this.then(undefined, onReject);
}
};
globalScope.Promise = Promise;
//#else
//throw new Error('DOM Promise is not present');
//#endif
})();
var StatTimer = (function StatTimerClosure() {
function rpad(str, pad, length) {
while (str.length < length) {
str += pad;
}
return str;
}
function StatTimer() {
this.started = {};
this.times = [];
this.enabled = true;
}
StatTimer.prototype = {
time: function StatTimer_time(name) {
if (!this.enabled) {
return;
}
if (name in this.started) {
warn('Timer is already running for ' + name);
}
this.started[name] = Date.now();
},
timeEnd: function StatTimer_timeEnd(name) {
if (!this.enabled) {
return;
}
if (!(name in this.started)) {
warn('Timer has not been started for ' + name);
}
this.times.push({
'name': name,
'start': this.started[name],
'end': Date.now()
});
// Remove timer from started so it can be called again.
delete this.started[name];
},
toString: function StatTimer_toString() {
var i, ii;
var times = this.times;
var out = '';
// Find the longest name for padding purposes.
var longest = 0;
for (i = 0, ii = times.length; i < ii; ++i) {
var name = times[i]['name'];
if (name.length > longest) {
longest = name.length;
}
}
for (i = 0, ii = times.length; i < ii; ++i) {
var span = times[i];
var duration = span.end - span.start;
out += rpad(span['name'], ' ', longest) + ' ' + duration + 'ms\n';
}
return out;
}
};
return StatTimer;
})();
PDFJS.createBlob = function createBlob(data, contentType) {
if (typeof Blob !== 'undefined') {
return new Blob([data], { type: contentType });
}
// Blob builder is deprecated in FF14 and removed in FF18.
var bb = new MozBlobBuilder();
bb.append(data);
return bb.getBlob(contentType);
};
PDFJS.createObjectURL = (function createObjectURLClosure() {
  // If Blob/createObjectURL is not available, fall back to a data: URI scheme.
var digits =
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
return function createObjectURL(data, contentType) {
if (!PDFJS.disableCreateObjectURL &&
typeof URL !== 'undefined' && URL.createObjectURL) {
var blob = PDFJS.createBlob(data, contentType);
return URL.createObjectURL(blob);
}
var buffer = 'data:' + contentType + ';base64,';
for (var i = 0, ii = data.length; i < ii; i += 3) {
var b1 = data[i] & 0xFF;
var b2 = data[i + 1] & 0xFF;
var b3 = data[i + 2] & 0xFF;
var d1 = b1 >> 2, d2 = ((b1 & 3) << 4) | (b2 >> 4);
var d3 = i + 1 < ii ? ((b2 & 0xF) << 2) | (b3 >> 6) : 64;
var d4 = i + 2 < ii ? (b3 & 0x3F) : 64;
buffer += digits[d1] + digits[d2] + digits[d3] + digits[d4];
}
return buffer;
};
})();
function MessageHandler(name, comObj) {
this.name = name;
this.comObj = comObj;
this.callbackIndex = 1;
this.postMessageTransfers = true;
var callbacksCapabilities = this.callbacksCapabilities = {};
var ah = this.actionHandler = {};
ah['console_log'] = [function ahConsoleLog(data) {
console.log.apply(console, data);
}];
ah['console_error'] = [function ahConsoleError(data) {
console.error.apply(console, data);
}];
ah['_unsupported_feature'] = [function ah_unsupportedFeature(data) {
UnsupportedManager.notify(data);
}];
comObj.onmessage = function messageHandlerComObjOnMessage(event) {
var data = event.data;
if (data.isReply) {
var callbackId = data.callbackId;
if (data.callbackId in callbacksCapabilities) {
var callback = callbacksCapabilities[callbackId];
delete callbacksCapabilities[callbackId];
if ('error' in data) {
callback.reject(data.error);
} else {
callback.resolve(data.data);
}
} else {
error('Cannot resolve callback ' + callbackId);
}
} else if (data.action in ah) {
var action = ah[data.action];
if (data.callbackId) {
Promise.resolve().then(function () {
return action[0].call(action[1], data.data);
}).then(function (result) {
comObj.postMessage({
isReply: true,
callbackId: data.callbackId,
data: result
});
}, function (reason) {
comObj.postMessage({
isReply: true,
callbackId: data.callbackId,
error: reason
});
});
} else {
action[0].call(action[1], data.data);
}
} else {
error('Unknown action from worker: ' + data.action);
}
};
}
MessageHandler.prototype = {
on: function messageHandlerOn(actionName, handler, scope) {
var ah = this.actionHandler;
if (ah[actionName]) {
error('There is already an actionName called "' + actionName + '"');
}
ah[actionName] = [handler, scope];
},
/**
* Sends a message to the comObj to invoke the action with the supplied data.
* @param {String} actionName Action to call.
* @param {JSON} data JSON data to send.
* @param {Array} [transfers] Optional list of transfers/ArrayBuffers
*/
send: function messageHandlerSend(actionName, data, transfers) {
var message = {
action: actionName,
data: data
};
this.postMessage(message, transfers);
},
/**
* Sends a message to the comObj to invoke the action with the supplied data.
* Expects that other side will callback with the response.
* @param {String} actionName Action to call.
* @param {JSON} data JSON data to send.
* @param {Array} [transfers] Optional list of transfers/ArrayBuffers.
* @returns {Promise} Promise to be resolved with response data.
*/
sendWithPromise:
function messageHandlerSendWithPromise(actionName, data, transfers) {
var callbackId = this.callbackIndex++;
var message = {
action: actionName,
data: data,
callbackId: callbackId
};
var capability = createPromiseCapability();
this.callbacksCapabilities[callbackId] = capability;
try {
this.postMessage(message, transfers);
} catch (e) {
capability.reject(e);
}
return capability.promise;
},
/**
* Sends raw message to the comObj.
* @private
* @param message {Object} Raw message.
* @param transfers List of transfers/ArrayBuffers, or undefined.
*/
postMessage: function (message, transfers) {
if (transfers && this.postMessageTransfers) {
this.comObj.postMessage(message, transfers);
} else {
this.comObj.postMessage(message);
}
}
};
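// A hedged wiring sketch (the action name 'echo' and the variable names are
// hypothetical): each side wraps its end of the channel in a MessageHandler;
// a handler registered via on() on one side answers sendWithPromise() calls
// made from the other side:
//   workerSide.on('echo', function (data) { return data; });
//   mainSide.sendWithPromise('echo', 'ping')
//     .then(function (reply) { /* reply === 'ping' */ });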
function loadJpegStream(id, imageUrl, objs) {
var img = new Image();
img.onload = (function loadJpegStream_onloadClosure() {
objs.resolve(id, img);
});
img.onerror = (function loadJpegStream_onerrorClosure() {
objs.resolve(id, null);
warn('Error during JPEG image loading');
});
img.src = imageUrl;
}<|fim▁end|>
|
lineTo: 14,
curveTo: 15,
curveTo2: 16,
curveTo3: 17,
|
<|file_name|>FormatIndentDecreaseSharp.js<|end_file_name|><|fim▁begin|>import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(
<React.Fragment><path d="M11 17h10v-2H11v2zm-8-5l4 4V8l-4 4zm0 9h18v-2H3v2zM3 3v2h18V3H3zm8 6h10V7H11v2zm0 4h10v-2H11v2z" /></React.Fragment><|fim▁hole|>, 'FormatIndentDecreaseSharp');<|fim▁end|>
| |
<|file_name|>catalog.test.ts<|end_file_name|><|fim▁begin|>import fs from "fs-extra"
import path from "path"
import mockFs from "mock-fs"
import { mockConsole, mockConfig } from "@lingui/jest-mocks"
import {
getCatalogs,
getCatalogForFile,
getCatalogForMerge,
Catalog,
cleanObsolete,
order,
normalizeRelativePath,
} from "./catalog"
import { createCompiledCatalog } from "./compile"
import {
copyFixture,
defaultMakeOptions,
defaultMakeTemplateOptions,
makeNextMessage,
defaultMergeOptions,
makeCatalog,
makePrevMessage,
} from "../tests"
export const fixture = (...dirs) =>
path.resolve(__dirname, path.join("fixtures", ...dirs)) +
// preserve trailing slash
(dirs[dirs.length - 1].endsWith("/") ? "/" : "")
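// Illustrative (hypothetical paths): fixture("collect", "componentA") resolves
// to ".../fixtures/collect/componentA", while fixture("collect/") keeps its
// trailing slash -- path.resolve() strips it, so it is re-appended above.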
describe("Catalog", function () {
afterEach(() => {
mockFs.restore()
})
describe("make", function () {
it("should collect and write catalogs", async function () {
const localeDir = copyFixture(fixture("locales", "initial"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [
fixture("collect/componentA/"),
fixture("collect/componentB"),
],
exclude: [],
},
mockConfig({
locales: ["en", "cs"],
})
)
// Everything should be empty
expect(catalog.readAll()).toMatchSnapshot()
await catalog.make(defaultMakeOptions)
expect(catalog.readAll()).toMatchSnapshot()
})
it("should only update the specified locale", async function () {
const localeDir = copyFixture(fixture("locales", "initial"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [
fixture("collect/componentA/"),
fixture("collect/componentB"),
],
exclude: [],
},
mockConfig({
locales: ["en", "cs"],
})
)
// Everything should be empty
expect(catalog.readAll()).toMatchSnapshot()
await catalog.make({ ...defaultMakeOptions, locale: "en" })
expect(catalog.readAll()).toMatchSnapshot()
})
it("should merge with existing catalogs", async function () {
const localeDir = copyFixture(fixture("locales", "existing"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}"),
include: [fixture("collect/")],
exclude: [],
},
mockConfig({
locales: ["en", "cs"],
})
)
// Everything should be empty
expect(catalog.readAll()).toMatchSnapshot()
await catalog.make(defaultMakeOptions)
expect(catalog.readAll()).toMatchSnapshot()
})
})
describe("makeTemplate", function () {
it("should collect and write a template", async function () {
const localeDir = copyFixture(fixture("locales", "initial"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [
fixture("collect/componentA/"),
fixture("collect/componentB"),
],
exclude: [],
},
mockConfig({
locales: ["en", "cs"],
})
)
// Everything should be empty
expect(catalog.readTemplate()).toMatchSnapshot()
await catalog.makeTemplate(defaultMakeTemplateOptions)
expect(catalog.readTemplate()).toMatchSnapshot()
})
})
describe("collect", function () {
it("should extract messages from source files", async function () {
const catalog = new Catalog(
{
name: "messages",
path: "locales/{locale}",
include: [fixture("collect/")],
exclude: [],
},
mockConfig()
)
const messages = await catalog.collect(defaultMakeOptions)
expect(messages).toMatchSnapshot()
})
it("should extract only files passed on options", async function () {
const catalog = new Catalog(
{
name: "messages",
path: "locales/{locale}",
include: [fixture("collect/componentA"), fixture("collect/componentB.js")],
exclude: [],
},
mockConfig()
)
const messages = await catalog.collect({
...defaultMakeOptions,
files: [fixture("collect/componentA")]
})
expect(messages).toMatchSnapshot()
})
it("should handle errors", function () {
const catalog = new Catalog(
{
name: "messages",
path: "locales/{locale}",
include: [fixture("collect-invalid/")],
exclude: [],
},
mockConfig()
)
mockConsole(async (console) => {
const messages = await catalog.collect(defaultMakeOptions)
expect(console.error).toBeCalledWith(
expect.stringContaining(`Cannot process file`)
)
expect(messages).toMatchSnapshot()
})
})
})
describe("merge", function () {
/*
catalog.merge(prevCatalogs, nextCatalog, options)
prevCatalogs - map of message catalogs in all available languages with translations
nextCatalog - language-agnostic catalog with collected messages
Note: if a catalog in prevCatalogs is null it means the language is available, but
no previous catalog was generated (usually first run).
Orthogonal use-cases
--------------------
Message IDs:
- auto-generated IDs: message is used as a key, `defaults` is not set
- custom IDs: message is used as `defaults`, custom ID as a key
Source locale (defined by `sourceLocale` in config):
- catalog for `sourceLocale`: initially, `translation` is prefilled with `defaults`
(for custom IDs) or `key` (for auto-generated IDs)
- all other languages: translation is kept empty
*/
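    // A minimal shape sketch of a merge call (values are illustrative):
    //   catalog.merge(
    //     { en: { "msg.id": { translation: "Hello" } }, cs: null },
    //     { "msg.id": { message: "Hello" } },
    //     { overwrite: false }
    //   )
    // returns one catalog per locale; the tests below pin down the exact
    // semantics.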
it("should initialize catalog", function () {
const prevCatalogs = { en: null, cs: null }
const nextCatalog = {
"custom.id": makeNextMessage({
message: "Message with custom ID",
}),
"Message with <0>auto-generated</0> ID": makeNextMessage(),
}
expect(
makeCatalog({ sourceLocale: "en" }).merge(
prevCatalogs,
nextCatalog,
defaultMergeOptions
)
).toEqual({
// catalog for sourceLocale - translation is prefilled
en: {
"custom.id": expect.objectContaining({
message: "Message with custom ID",
translation: "Message with custom ID",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Message with <0>auto-generated</0> ID",
}),
},
// catalog for other than sourceLocale - translation is empty
cs: {
"custom.id": expect.objectContaining({
message: "Message with custom ID",
translation: "",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "",
}),
},
})
})
it("should merge translations from existing catalogs", function () {
const prevCatalogs = {
en: {
"custom.id": makePrevMessage({
message: "Message with custom ID",
translation: "Message with custom ID",
}),
"Message with <0>auto-generated</0> ID": makePrevMessage({
translation: "Message with <0>auto-generated</0> ID",
}),
},
cs: {
"custom.id": makePrevMessage({
message: "Message with custom ID",
translation: "Translation of message with custom ID",
}),
"Message with <0>auto-generated</0> ID": makePrevMessage({
translation: "Translation of message with auto-generated ID",
}),
},
}
const nextCatalog = {
"custom.id": makeNextMessage({
message: "Message with custom ID, possibly changed",
}),
"new.id": makeNextMessage({
message: "Completely new message",
}),
"Message with <0>auto-generated</0> ID": makeNextMessage(),
"New message": makeNextMessage(),
}
expect(
makeCatalog({ sourceLocale: "en" }).merge(
prevCatalogs,
nextCatalog,
defaultMergeOptions
)
).toEqual({
en: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Message with custom ID, possibly changed",
}),
"new.id": expect.objectContaining({
message: "Completely new message",
translation: "Completely new message",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Message with <0>auto-generated</0> ID",
}),
"New message": expect.objectContaining({
translation: "New message",
}),
},
cs: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Translation of message with custom ID",
}),
"new.id": expect.objectContaining({
message: "Completely new message",
translation: "",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Translation of message with auto-generated ID",
}),
"New message": expect.objectContaining({
translation: "",
}),
},
})
})
it("should force overwrite of defaults", function () {
const prevCatalogs = {
en: {
"custom.id": makePrevMessage({
message: "",
translation: "Message with custom ID",
}),
"Message with <0>auto-generated</0> ID": makePrevMessage({
translation: "Source of message with <0>auto-generated</0> ID",
}),
},
cs: {
"custom.id": makePrevMessage({
message: "",
translation: "Translation of message with custom ID",
}),
"Message with <0>auto-generated</0> ID": makePrevMessage({
translation: "Translation of message with auto-generated ID",
}),
},
}
const nextCatalog = {
"custom.id": makeNextMessage({
message: "Message with custom ID, possibly changed",
}),
"Message with <0>auto-generated</0> ID": makeNextMessage(),
}
// Without `overwrite`:
// The translations of all IDs for `sourceLocale` are kept intact
expect(
makeCatalog({ sourceLocale: "en" }).merge(
prevCatalogs,
nextCatalog,
defaultMergeOptions
)
).toEqual({
en: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Message with custom ID",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Source of message with <0>auto-generated</0> ID",
}),
},
cs: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Translation of message with custom ID",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Translation of message with auto-generated ID",
}),
},
})
    // With `overwrite`:
// The translations of all IDs for `sourceLocale` are changed
expect(
makeCatalog({ sourceLocale: "en" }).merge(prevCatalogs, nextCatalog, {
overwrite: true,
})
).toEqual({
en: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Message with custom ID, possibly changed",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Message with <0>auto-generated</0> ID",
}),
},
cs: {
"custom.id": expect.objectContaining({
message: "Message with custom ID, possibly changed",
translation: "Translation of message with custom ID",
}),
"Message with <0>auto-generated</0> ID": expect.objectContaining({
translation: "Translation of message with auto-generated ID",
}),
},
})
})
it("should mark obsolete messages", function () {
const prevCatalogs = {
en: {
"msg.hello": makePrevMessage({
translation: "Hello World",
}),
},
}
const nextCatalog = {}
expect(
makeCatalog().merge(prevCatalogs, nextCatalog, defaultMergeOptions)
).toEqual({
en: {
"msg.hello": expect.objectContaining({
translation: "Hello World",
obsolete: true,
}),
},
})
})
})
describe("read", function () {
it("should return null if file does not exist", function () {
// mock empty filesystem
mockFs()
const catalog = new Catalog(
{
name: "messages",
path: "locales/{locale}",
include: [],
exclude: [],
},
mockConfig()
)
const messages = catalog.read("en")
expect(messages).toBeNull()
})
it("should read file in given format", function () {
mockFs({
en: {
"messages.po": fs.readFileSync(
path.resolve(__dirname, "formats/fixtures/messages.po")
),
},
})
const catalog = new Catalog(
{
name: "messages",
path: "{locale}/messages",
include: [],
},
mockConfig()
)
const messages = catalog.read("en")
mockFs.restore()
expect(messages).toMatchSnapshot()
})
it("should read file in previous format", function () {
mockFs({
en: {
"messages.json": fs.readFileSync(
path.resolve(__dirname, "formats/fixtures/messages.json")
),
},
})
const catalog = new Catalog(
{
name: "messages",
path: "{locale}/messages",
include: [],
},
mockConfig({ prevFormat: "minimal" })
)
const messages = catalog.read("en")
mockFs.restore()
expect(messages).toMatchSnapshot()
})
})
describe("readAll", function () {
it("should read existing catalogs for all locales", function () {
const catalog = new Catalog(
{
name: "messages",
path: path.resolve(
__dirname,
path.join("fixtures", "readAll", "{locale}", "messages")
),
include: [],
},
mockConfig({
locales: ["en", "cs"],
})
)
const messages = catalog.readAll()
expect(messages).toMatchSnapshot()
})
})
/**
* Convert JSON format to PO and then back to JSON.
* - Compare that original and converted JSON file are identical
* - Check the content of PO file
*/
it.skip("should convert catalog format", function () {
mockFs({
en: {
"messages.json": fs.readFileSync(
path.resolve(__dirname, "formats/fixtures/messages.json")
),
"messages.po": mockFs.file(),
},
})
const fileContent = (format) =>
fs
.readFileSync("./en/messages." + (format === "po" ? "po" : "json"))
.toString()
.trim()
const catalogConfig = {
name: "messages",
path: "{locale}/messages",
include: [],
}
const originalJson = fileContent("json")
const po2json = new Catalog(
catalogConfig,
mockConfig({
format: "po",
prevFormat: "minimal",
})
)
po2json.write("en", po2json.read("en"))
const convertedPo = fileContent("po")
const json2po = new Catalog(
catalogConfig,
mockConfig({
format: "minimal",
prevFormat: "po",
localeDir: ".",
})
)
json2po.write("en", json2po.read("en"))
const convertedJson = fileContent("json")
mockFs.restore()
expect(originalJson).toEqual(convertedJson)
expect(convertedPo).toMatchSnapshot()
})
})
describe("getCatalogs", function () {
afterEach(() => {
mockFs.restore()
})
it("should get single catalog if catalogPath doesn't include {name} pattern", function () {
const config = mockConfig({
catalogs: [
{
path: "./src/locales/{locale}",
include: ["./src/"],
},
],
})
expect(getCatalogs(config)).toEqual([
new Catalog(
{
name: null,
path: "src/locales/{locale}",
include: ["src/"],
exclude: [],
},
config
),
])
})
it("should have catalog name and ignore patterns", function () {
const config = mockConfig({
catalogs: [
{
path: "src/locales/{locale}/all",
include: ["src/", "/absolute/path/"],
exclude: ["node_modules/"],
},
],
})
expect(getCatalogs(config)).toEqual([
new Catalog(
{
name: "all",
path: "src/locales/{locale}/all",
include: ["src/", "/absolute/path/"],
exclude: ["node_modules/"],
},
config
),
])
})
it("should expand {name} for matching directories", function () {
mockFs({
componentA: {
"index.js": mockFs.file(),
},
componentB: {
"index.js": mockFs.file(),
},
})
const config = mockConfig({
catalogs: [
{
path: "{name}/locales/{locale}",
include: ["./{name}/"],
},
],
})
expect(getCatalogs(config)).toEqual([
new Catalog(
{
name: "componentA",
path: "componentA/locales/{locale}",
include: ["componentA/"],
exclude: [],
},
config
),
new Catalog(
{
name: "componentB",
path: "componentB/locales/{locale}",
include: ["componentB/"],
exclude: [],
},
config
),
])
})
it("shouldn't expand {name} for ignored directories", function () {
mockFs({
componentA: {
"index.js": mockFs.file(),
},
componentB: {
"index.js": mockFs.file(),
},
})
const config = mockConfig({
catalogs: [
{
path: "./{name}/locales/{locale}",
include: ["./{name}/"],
exclude: ["componentB/"],
},
],
})
expect(getCatalogs(config)).toEqual([
new Catalog(
{
name: "componentA",
path: "componentA/locales/{locale}",
include: ["componentA/"],
exclude: ["componentB/"],
},
config
),
])
})
it("should warn if catalogPath is a directory", function () {
expect(() =>
getCatalogs(
mockConfig({
catalogs: [
{
path: "./locales/{locale}/",
include: ["."],
},
],
})
)
).toThrowErrorMatchingSnapshot()
    // Use values from config in the error message
expect(() =>
getCatalogs(
mockConfig({
locales: ["cs"],
format: "minimal",
catalogs: [
{
path: "./locales/{locale}/",
include: ["."],
},
],
})
)
).toThrowErrorMatchingSnapshot()
})
it("should warn about missing {name} pattern in catalog path", function () {
expect(() =>
getCatalogs(
mockConfig({
catalogs: [
{
path: "./locales/{locale}",
include: ["./{name}/"],
},
],
})
)
).toThrowErrorMatchingSnapshot()
})
})
describe("getCatalogForFile", function () {
it("should return null if catalog cannot be found", function () {
const catalogs = [<|fim▁hole|> new Catalog(
{
name: null,
path: "./src/locales/{locale}",
include: ["./src/"],
},
mockConfig()
),
]
expect(getCatalogForFile("./xyz/en.po", catalogs)).toBeNull()
})
it("should return matching catalog and locale", function () {
const catalog = new Catalog(
{
name: null,
path: "./src/locales/{locale}",
include: ["./src/"],
},
mockConfig({ format: "po" })
)
const catalogs = [catalog]
expect(getCatalogForFile("./src/locales/en.po", catalogs)).toEqual({
locale: "en",
catalog,
})
})
it("should work with Windows path delimiters", function () {
const catalog = new Catalog(
{
name: null,
path: ".\\src\\locales\\{locale}",
include: ["./src/"],
},
mockConfig({ format: "po" })
)
const catalogs = [catalog]
expect(getCatalogForFile("src\\locales\\en.po", catalogs)).toEqual({
locale: "en",
catalog,
})
})
})
describe("getCatalogForMerge", function () {
afterEach(() => {
mockFs.restore()
})
it("should return catalog for merged messages", function () {
const config = mockConfig({
catalogsMergePath: "locales/{locale}",
})
expect(getCatalogForMerge(config)).toEqual(
new Catalog(
{
name: null,
path: "locales/{locale}",
include: [],
exclude: [],
},
config
)
)
})
it("should return catalog with custom name for merged messages", function () {
const config = mockConfig({
catalogsMergePath: "locales/{locale}/my/dir",
})
expect(getCatalogForMerge(config)).toEqual(
new Catalog(
{
name: "dir",
path: "locales/{locale}/my/dir",
include: [],
exclude: [],
},
config
)
)
})
it("should throw error if catalogsMergePath ends with slash", function () {
const config = mockConfig({
catalogsMergePath: "locales/{locale}/bad/path/",
})
expect.assertions(1)
try {
getCatalogForMerge(config)
} catch (e) {
expect(e.message).toBe(
'Remove trailing slash from "locales/{locale}/bad/path/". Catalog path isn\'t a directory, but translation file without extension. For example, catalog path "locales/{locale}/bad/path" results in translation file "locales/en/bad/path.po".'
)
}
})
it("should throw error if {locale} is omitted from catalogsMergePath", function () {
const config = mockConfig({
catalogsMergePath: "locales/bad/path",
})
expect.assertions(1)
try {
getCatalogForMerge(config)
} catch (e) {
expect(e.message).toBe(
"Invalid catalog path: {locale} variable is missing"
)
}
})
})
describe("normalizeRelativePath", function () {
afterEach(() => {
mockFs.restore()
})
it("should preserve absolute paths - posix", function () {
const absolute = "/my/directory"
expect(normalizeRelativePath(absolute)).toEqual(absolute)
})
it("should preserve absolute paths - win32", function () {
const absolute = "C:\\my\\directory"
// path remains the same, but separators are converted to posix
expect(normalizeRelativePath(absolute)).toEqual(
absolute.split("\\").join("/")
)
})
  it("directories without ending slash are correctly treated as dirs", function () {
mockFs({
componentA: {
"index.js": mockFs.file(),
},
"componentB": mockFs.file(),
})
    // componentA is correctly detected as a directory, so a trailing slash is appended
expect(normalizeRelativePath("./componentA")).toEqual("componentA/")
    // componentB is a file, so no trailing slash is appended
expect(normalizeRelativePath("./componentB")).toEqual("componentB")
})
})
describe("cleanObsolete", function () {
it("should remove obsolete messages from catalog", function () {
const catalog = {
Label: makeNextMessage({
translation: "Label",
}),
PreviousLabel: makeNextMessage({
obsolete: true,
}),
}
expect(cleanObsolete(catalog)).toMatchSnapshot()
})
})
describe("order", function () {
it("should order messages alphabetically", function () {
const catalog = {
LabelB: makeNextMessage({
translation: "B",
}),
LabelA: makeNextMessage({
translation: "A",
}),
LabelD: makeNextMessage({
translation: "D",
}),
LabelC: makeNextMessage({
translation: "C",
}),
}
const orderedCatalogs = order("messageId")(catalog)
// Test that the message content is the same as before
expect(orderedCatalogs).toMatchSnapshot()
    // Jest snapshots order keys automatically, so test the key order explicitly
expect(Object.keys(orderedCatalogs)).toMatchSnapshot()
})
it("should order messages by origin", function () {
const catalog = {
LabelB: makeNextMessage({
translation: "B",
origin: [
["file2.js", 2],
["file1.js", 2],
],
}),
LabelA: makeNextMessage({
translation: "A",
origin: [["file2.js", 3]],
}),
LabelD: makeNextMessage({
translation: "D",
origin: [["file2.js", 100]],
}),
LabelC: makeNextMessage({
translation: "C",
origin: [["file1.js", 1]],
}),
}
const orderedCatalogs = order("origin")(catalog)
// Test that the message content is the same as before
expect(orderedCatalogs).toMatchSnapshot()
    // Jest snapshots order keys automatically, so test the key order explicitly
expect(Object.keys(orderedCatalogs)).toMatchSnapshot()
})
})
describe("writeCompiled", function () {
it("saves ES modules to .mjs files", function () {
const localeDir = copyFixture(fixture("locales", "initial/"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [],
exclude: [],
},
mockConfig()
)
const namespace = "es"
const compiledCatalog = createCompiledCatalog("en", {}, { namespace })
// Test that the file extension of the compiled catalog is `.mjs`
expect(catalog.writeCompiled("en", compiledCatalog, namespace)).toMatch(
/\.mjs$/
)
})
it("saves TS modules to .ts files", function () {
const localeDir = copyFixture(fixture("locales", "initial/"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [],
exclude: [],
},
mockConfig()
)
const namespace = "ts"
const compiledCatalog = createCompiledCatalog("en", {}, { namespace })
expect(catalog.writeCompiled("en", compiledCatalog, namespace)).toMatch(
/\.ts$/
)
})
it("saves anything else than ES modules to .js files", function () {
const localeDir = copyFixture(fixture("locales", "initial/"))
const catalog = new Catalog(
{
name: "messages",
path: path.join(localeDir, "{locale}", "messages"),
include: [],
exclude: [],
},
mockConfig()
)
let compiledCatalog = createCompiledCatalog("en", {}, {})
// Test that the file extension of the compiled catalog is `.js`
expect(catalog.writeCompiled("en", compiledCatalog)).toMatch(/\.js$/)
compiledCatalog = createCompiledCatalog("en", {}, { namespace: "cjs" })
expect(catalog.writeCompiled("en", compiledCatalog)).toMatch(/\.js$/)
compiledCatalog = createCompiledCatalog(
"en",
{},
{ namespace: "window.test" }
)
expect(catalog.writeCompiled("en", compiledCatalog)).toMatch(/\.js$/)
compiledCatalog = createCompiledCatalog(
"en",
{},
{ namespace: "global.test" }
)
expect(catalog.writeCompiled("en", compiledCatalog)).toMatch(/\.js$/)
})
})<|fim▁end|>
| |
<|file_name|>background.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* Extension ID of Files.app.
* @type {string}
* @const
*/
var FILE_MANAGER_EXTENSIONS_ID = 'hhaomjibdihmijegdhdafkllkbggdgoj';
/**
* Calls a remote test util in Files.app's extension. See: test_util.js.
*
* @param {string} func Function name.
* @param {?string} appId Target window's App ID or null for functions
* not requiring a window.
* @param {Array.<*>} args Array of arguments.
* @param {function(*)} callback Callback handling the function's result.
*/
function callRemoteTestUtil(func, appId, args, callback) {<|fim▁hole|> func: func,
appId: appId,
args: args
},
callback);
}
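// Illustrative usage of the helper above (the util name 'openMainWindow' is
// hypothetical here; the real names are defined in Files.app's test_util.js):
//
//   callRemoteTestUtil('openMainWindow', null, ['local'], function(appId) {
//     // continue driving the opened window through further remote calls
//   });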
chrome.test.runTests([
// Waits for the C++ code to send a string identifying a test, then runs that
// test.
function testRunner() {
var command = chrome.extension.inIncognitoContext ? 'which test guest' :
'which test non-guest';
chrome.test.sendMessage(command, function(testCaseName) {
// Run one of the test cases defined in the testcase namespace, in
// test_cases.js. The test case name is passed via StartTest call in
// file_manager_browsertest.cc.
if (testcase[testCaseName])
testcase[testCaseName]();
else
chrome.test.fail('Bogus test name passed to testRunner()');
});
}
]);<|fim▁end|>
|
chrome.runtime.sendMessage(
FILE_MANAGER_EXTENSIONS_ID, {
|
<|file_name|>mail.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Caliopen mail message privacy features extraction methods."""
from __future__ import absolute_import, print_function, unicode_literals
import logging
import pgpy
from caliopen_main.pi.parameters import PIParameter
from .helpers.spam import SpamScorer
from .helpers.ingress_path import get_ingress_features
from .helpers.importance_level import compute_importance
from .types import init_features
log = logging.getLogger(__name__)
TLS_VERSION_PI = {
'tlsv1/sslv3': 2,
'tls1': 7,
'tlsv1': 7,
'tls12': 10,
}
PGP_MESSAGE_HEADER = '\n-----BEGIN PGP MESSAGE-----'
class InboundMailFeature(object):
"""Process a parsed mail message and extract available privacy features."""
def __init__(self, message, config):
"""Get a ``MailMessage`` instance and extract privacy features."""
self.message = message
self.config = config
self._features = init_features('message')
def is_blacklist_mx(self, mx):
"""MX is blacklisted."""
blacklisted = self.config.get('blacklistes.mx')
if not blacklisted:
return False
if mx in blacklisted:
return True
return False
def is_whitelist_mx(self, mx):
"""MX is whitelisted."""
whitelistes = self.config.get('whitelistes.mx')
if not whitelistes:
return False
if mx in whitelistes:
return True
return False
@property
def internal_domains(self):
"""Get internal hosts from configuration."""
domains = self.config.get('internal_domains')
return domains if domains else []
def emitter_reputation(self, mx):
"""Return features about emitter."""
if self.is_blacklist_mx(mx):
return 'blacklisted'
if self.is_whitelist_mx(mx):
return 'whitelisted'
return 'unknown'
def emitter_certificate(self):
"""Get the certificate from emitter."""<|fim▁hole|> return None
@property
def mail_agent(self):
"""Get the mailer used for this message."""
# XXX normalize better and more ?
return self.message.mail.get('X-Mailer', '').lower()
@property
def transport_signature(self):
"""Get the transport signature if any."""
return self.message.mail.get('DKIM-Signature')
@property
def spam_informations(self):
"""Return a global spam_score and related features."""
spam = SpamScorer(self.message.mail)
return {'spam_score': spam.score,
'spam_method': spam.method,
'is_spam': spam.is_spam}
@property
def is_internal(self):
"""Return true if it's an internal message."""
from_ = self.message.mail.get('From')
for domain in self.internal_domains:
if domain in from_:
return True
return False
def get_signature_informations(self):
"""Get message signature features."""
signed_parts = [x for x in self.message.attachments
if 'pgp-sign' in x.content_type]
if not signed_parts:
return {}
sign = pgpy.PGPSignature()
features = {'message_signed': True,
'message_signature_type': 'PGP'}
try:
sign.parse(signed_parts[0].data)
features.update({'message_signer': sign.signer})
except Exception as exc:
log.error('Unable to parse pgp signature {}'.format(exc))
return features
def get_encryption_informations(self):
"""Get message encryption features."""
is_encrypted = False
if 'encrypted' in self.message.extra_parameters:
is_encrypted = True
# Maybe pgp/inline ?
if not is_encrypted:
try:
body = self.message.body_plain.decode('utf-8')
if body.startswith(PGP_MESSAGE_HEADER):
is_encrypted = True
except UnicodeDecodeError:
log.warn('Invalid body_plain encoding for message')
pass
return {'message_encrypted': is_encrypted,
'message_encryption_method': 'pgp' if is_encrypted else ''}
def _get_features(self):
"""Extract privacy features."""
features = self._features.copy()
received = self.message.headers.get('Received', [])
features.update(get_ingress_features(received, self.internal_domains))
mx = features.get('ingress_server')
reputation = None if not mx else self.emitter_reputation(mx)
features['mail_emitter_mx_reputation'] = reputation
features['mail_emitter_certificate'] = self.emitter_certificate()
features['mail_agent'] = self.mail_agent
features['is_internal'] = self.is_internal
features.update(self.get_signature_informations())
features.update(self.get_encryption_informations())
features.update(self.spam_informations)
if self.transport_signature:
features.update({'transport_signed': True})
return features
def _compute_pi(self, participants, features):
"""Compute Privacy Indexes for a message."""
log.info('PI features {}'.format(features))
pi_cx = {} # Contextual privacy index
pi_co = {} # Comportemental privacy index
pi_t = {} # Technical privacy index
reput = features.get('mail_emitter_mx_reputation')
if reput == 'whitelisted':
pi_cx['reputation_whitelist'] = 20
elif reput == 'unknown':
pi_cx['reputation_unknow'] = 10
known_contacts = []
known_public_key = 0
for part, contact in participants:
if contact:
known_contacts.append(contact)
if contact.public_key:
known_public_key += 1
if len(participants) == len(known_contacts):
            # - If all contacts are already known, PIᶜˣ increases by the
            #   lowest of the contacts' PIᶜᵒ values.
contact_pi_cos = [x.pi['comportment'] for x in known_contacts
if x.pi and 'comportment' in x.pi]
if contact_pi_cos:
pi_cx['known_contacts'] = min(contact_pi_cos)
if known_public_key == len(known_contacts):
pi_co['contact_pubkey'] = 20
ext_hops = features.get('nb_external_hops', 0)
if ext_hops <= 1:
tls = features.get('ingress_socket_version')
if tls:
if tls not in TLS_VERSION_PI:
log.warn('Unknown TLS version {}'.format(tls))
else:
                    # pi_t is a dict keyed by feature (summed below), so a
                    # bare `+=` would raise TypeError; store under a key.
                    pi_t['tls_version'] = TLS_VERSION_PI[tls]
if features.get('mail_emitter_certificate'):
pi_t['emitter_certificate'] = 10
if features.get('transport_signed'):
pi_t['transport_signed'] = 10
if features.get('message_encrypted'):
pi_t['encrypted'] = 30
log.info('PI compute t:{} cx:{} co:{}'.format(pi_t, pi_cx, pi_co))
return PIParameter({'technic': sum(pi_t.values()),
'context': sum(pi_cx.values()),
'comportment': sum(pi_co.values()),
'version': 0})
def process(self, user, message, participants):
"""
Process the message for privacy features and PI compute.
:param user: user the message belong to
:ptype user: caliopen_main.user.core.User
:param message: a message parameter that will be updated with PI
:ptype message: NewMessage
:param participants: an array of participant with related Contact
:ptype participants: list(Participant, Contact)
"""
features = self._get_features()
message.pi = self._compute_pi(participants, features)
il = compute_importance(user, message, features, participants)
message.privacy_features = features
message.importance_level = il<|fim▁end|>
| |
<|file_name|>middleware.py<|end_file_name|><|fim▁begin|># -*- coding:Utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import is_valid_path
from django.http import HttpResponseRedirect
from django.utils.cache import patch_vary_headers
from django.utils import translation
from django.middleware.locale import LocaleMiddleware
from corsheaders.middleware import CorsMiddleware
__all__ = (
'VosaeLocaleMiddleware',
)
class VosaeLocaleMiddleware(LocaleMiddleware):
def process_response(self, request, response):
language = translation.get_language()
# Check if app has i18n_patterns urlconf
is_i18n_pattern = hasattr(request, 'resolver_match') and getattr(request.resolver_match, 'app_name', None) in ('account',)
        # If path is '/', resolution fails and resolver_match is not provided
if request.path == '/' and request.user.is_anonymous():
# On home, if not anonymous -> tenant_root
is_i18n_pattern = True
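        # If an i18n-enabled URL 404s without a language prefix, retry the
        # same path prefixed with the active language and redirect when it
        # resolves (same idea as the response handling in Django's stock
        # LocaleMiddleware, which this class extends).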
if (response.status_code == 404 and
is_i18n_pattern<|fim▁hole|> and not translation.get_language_from_path(request.path_info)
and self.is_language_prefix_patterns_used()):
urlconf = getattr(request, 'urlconf', None)
language_path = '/%s%s' % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
if (not path_valid and settings.APPEND_SLASH
and not language_path.endswith('/')):
path_valid = is_valid_path("%s/" % language_path, urlconf)
if path_valid:
language_url = "%s://%s/%s%s" % (
request.is_secure() and 'https' or 'http',
request.get_host(), language, request.get_full_path())
return HttpResponseRedirect(language_url)
translation.deactivate()
patch_vary_headers(response, ('Accept-Language',))
if 'Content-Language' not in response:
response['Content-Language'] = language
return response
class VosaeCorsMiddleware(CorsMiddleware):
"""Middleware which adds headers for every API requests"""
def process_request(self, request):
if request.path.startswith('/api/'):
return super(VosaeCorsMiddleware, self).process_request(request)
return None
def process_response(self, request, response):
if request.path.startswith('/api/'):
return super(VosaeCorsMiddleware, self).process_response(request, response)
return response<|fim▁end|>
| |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from restclients.mock_http import MockHTTP
from myuw.util.cache_implementation import MyUWCache
from restclients.models import CacheEntryTimed
from datetime import timedelta
CACHE = 'myuw.util.cache_implementation.MyUWCache'
class TestCustomCachePolicy(TestCase):
def test_sws_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse("sws",
"/student/myuwcachetest1",
ok_response)<|fim▁hole|> self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws",
url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('sws', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
def test_sws_term_policy(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
cache.processResponse(
"sws", "/student/v5/term/1014,summer.json", ok_response)
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="sws", url="/student/v5/term/1014,summer.json")
# Cached response is returned after 29 days
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = orig_time_saved - timedelta(days=29)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 31 days
cache_entry.time_saved = orig_time_saved - timedelta(days=31)
cache_entry.save()
response = cache.getCache(
'sws', '/student/v5/term/1014,summer.json', {})
self.assertEquals(response, None)
def test_myplan_default(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
cache.processResponse("myplan", "/api/plan/xx", ok_response)
response = cache.getCache('myplan', '/api/plan/xx', {})
self.assertEquals(response, None)
def test_default_policies(self):
with self.settings(RESTCLIENTS_DAO_CACHE_CLASS=CACHE):
cache = MyUWCache()
ok_response = MockHTTP()
ok_response.status = 200
ok_response.data = "xx"
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)
cache.processResponse(
"no_such", "/student/myuwcachetest1", ok_response)
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response["response"].data, 'xx')
cache_entry = CacheEntryTimed.objects.get(
service="no_such", url="/student/myuwcachetest1")
# Cached response is returned after 3 hours and 58 minutes
orig_time_saved = cache_entry.time_saved
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)-2))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertNotEquals(response, None)
# Cached response is not returned after 4 hours and 1 minute
cache_entry.time_saved = (orig_time_saved -
timedelta(minutes=(60 * 4)+1))
cache_entry.save()
response = cache.getCache('no_such', '/student/myuwcachetest1', {})
self.assertEquals(response, None)<|fim▁end|>
|
response = cache.getCache('sws', '/student/myuwcachetest1', {})
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod errors;
use common::Named;
use errors::*;
use fnv::{FnvHashMap, FnvHashSet};
use intern::string_key::{Intern, StringKey};
use lazy_static::lazy_static;
use rayon::prelude::*;
use regex::Regex;
use schema::{
EnumID, Field, FieldID, InputObjectID, Interface, SDLSchema, Schema, Type, TypeReference,<|fim▁hole|>use schema_print::{print_directive, print_type};
use std::time::Instant;
use std::{fmt::Write, sync::Mutex};
lazy_static! {
static ref INTROSPECTION_TYPES: FnvHashSet<StringKey> = vec![
"__Schema".intern(),
"__Directive".intern(),
"__DirectiveLocation".intern(),
"__Type".intern(),
"__Field".intern(),
"__InputValue".intern(),
"__EnumValue".intern(),
"__TypeKind".intern(),
]
.into_iter()
.collect();
static ref QUERY: StringKey = "Query".intern();
static ref SUBSCRIPTION: StringKey = "Subscription".intern();
static ref MUTATION: StringKey = "Mutation".intern();
static ref TYPE_NAME_REGEX: Regex = Regex::new(r"^[_a-zA-Z][_a-zA-Z0-9]*$").unwrap();
}
pub fn validate(schema: &SDLSchema) -> ValidationContext<'_> {
let mut validation_context = ValidationContext::new(schema);
validation_context.validate();
validation_context
}
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum ValidationContextType {
TypeNode(StringKey),
DirectiveNode(StringKey),
None,
}
impl ValidationContextType {
pub fn type_name(self) -> String {
match self {
ValidationContextType::DirectiveNode(type_name)
| ValidationContextType::TypeNode(type_name) => type_name.lookup().to_string(),
_ => "None".to_string(),
}
}
}
pub struct ValidationContext<'schema> {
pub schema: &'schema SDLSchema,
pub errors: Mutex<FnvHashMap<ValidationContextType, Vec<SchemaValidationError>>>,
}
impl<'schema> ValidationContext<'schema> {
pub fn new(schema: &'schema SDLSchema) -> Self {
Self {
schema,
errors: Mutex::new(FnvHashMap::default()),
}
}
fn validate(&mut self) {
let now = Instant::now();
self.validate_root_types();
self.validate_directives();
self.validate_types();
println!("Validated Schema in {}ms", now.elapsed().as_millis());
println!(
"Found {} validation errors",
self.errors.lock().unwrap().len()
)
}
fn validate_root_types(&mut self) {
self.validate_root_type(self.schema.query_type(), *QUERY);
self.validate_root_type(self.schema.subscription_type(), *SUBSCRIPTION);
self.validate_root_type(self.schema.mutation_type(), *MUTATION);
}
fn validate_root_type(&self, root_type: Option<Type>, type_name: StringKey) {
if let Some(type_) = root_type {
if !type_.is_object() {
self.report_error(
SchemaValidationError::InvalidRootType(type_name, type_),
ValidationContextType::TypeNode(type_name),
);
}
} else if type_name == *QUERY {
self.add_error(SchemaValidationError::MissingRootType(type_name));
}
}
fn validate_directives(&mut self) {
for directive in self.schema.get_directives() {
let context = ValidationContextType::DirectiveNode(directive.name);
self.validate_name(directive.name, context);
let mut arg_names = FnvHashSet::default();
for argument in directive.arguments.iter() {
self.validate_name(argument.name, context);
// Ensure unique arguments per directive.
if arg_names.contains(&argument.name) {
self.report_error(
SchemaValidationError::DuplicateArgument(argument.name, directive.name),
context,
);
continue;
}
arg_names.insert(argument.name);
}
}
}
fn validate_types(&mut self) {
let types = self.schema.get_type_map().collect::<Vec<_>>();
types.par_iter().for_each(|(type_name, type_)| {
// Ensure it is named correctly (excluding introspection types).
if !is_introspection_type(type_, **type_name) {
self.validate_name(**type_name, ValidationContextType::TypeNode(**type_name));
}
match type_ {
Type::Enum(id) => {
// Ensure Enums have valid values.
self.validate_enum_type(*id);
}
Type::InputObject(id) => {
// Ensure Input Object fields are valid.
self.validate_input_object_fields(*id);
}
Type::Interface(id) => {
let interface = self.schema.interface(*id);
// Ensure fields are valid
self.validate_fields(**type_name, &interface.fields);
// Validate cyclic references
if !self.validate_cyclic_implements_reference(interface) {
// Ensure interface implement the interfaces they claim to.
self.validate_type_with_interfaces(interface);
}
}
Type::Object(id) => {
let object = self.schema.object(*id);
// Ensure fields are valid
self.validate_fields(**type_name, &object.fields);
// Ensure objects implement the interfaces they claim to.
self.validate_type_with_interfaces(object);
}
Type::Union(id) => {
// Ensure Unions include valid member types.
self.validate_union_members(*id);
}
Type::Scalar(_id) => {}
};
});
}
fn validate_fields(&self, type_name: StringKey, fields: &[FieldID]) {
let context = ValidationContextType::TypeNode(type_name);
// Must define one or more fields.
if fields.is_empty() {
self.report_error(SchemaValidationError::TypeWithNoFields, context)
}
let mut field_names = FnvHashSet::default();
for field_id in fields {
let field = self.schema.field(*field_id);
if field_names.contains(&field.name) {
self.report_error(
SchemaValidationError::DuplicateField(field.name.item),
context,
);
continue;
}
field_names.insert(field.name);
// Ensure they are named correctly.
self.validate_name(field.name.item, context);
// Ensure the type is an output type
if !is_output_type(&field.type_) {
self.report_error(
SchemaValidationError::InvalidFieldType(
type_name,
field.name.item,
field.type_.clone(),
),
context,
)
}
let mut arg_names = FnvHashSet::default();
for argument in field.arguments.iter() {
// Ensure they are named correctly.
self.validate_name(argument.name, context);
                // Ensure arguments are unique per field.
if arg_names.contains(&argument.name) {
self.report_error(
SchemaValidationError::DuplicateArgument(argument.name, field.name.item),
context,
);
continue;
}
arg_names.insert(argument.name);
// Ensure the type is an input type
if !is_input_type(&argument.type_) {
self.report_error(
SchemaValidationError::InvalidArgumentType(
type_name,
field.name.item,
argument.name,
argument.type_.clone(),
),
context,
);
}
}
}
}
fn validate_union_members(&self, id: UnionID) {
let union = self.schema.union(id);
let context = ValidationContextType::TypeNode(union.name);
if union.members.is_empty() {
self.report_error(
SchemaValidationError::UnionWithNoMembers(union.name),
context,
);
}
let mut member_names = FnvHashSet::default();
for member in union.members.iter() {
let member_name = self.schema.object(*member).name.item;
if member_names.contains(&member_name) {
self.report_error(SchemaValidationError::DuplicateMember(member_name), context);
continue;
}
member_names.insert(member_name);
}
}
fn validate_enum_type(&self, id: EnumID) {
let enum_ = self.schema.enum_(id);
let context = ValidationContextType::TypeNode(enum_.name);
if enum_.values.is_empty() {
self.report_error(SchemaValidationError::EnumWithNoValues, context);
}
for value in enum_.values.iter() {
// Ensure valid name.
self.validate_name(value.value, context);
let value_name = value.value.lookup();
if value_name == "true" || value_name == "false" || value_name == "null" {
self.report_error(
SchemaValidationError::InvalidEnumValue(value.value),
context,
);
}
}
}
fn validate_input_object_fields(&self, id: InputObjectID) {
let input_object = self.schema.input_object(id);
let context = ValidationContextType::TypeNode(input_object.name);
if input_object.fields.is_empty() {
self.report_error(SchemaValidationError::TypeWithNoFields, context);
}
// Ensure the arguments are valid
for field in input_object.fields.iter() {
// Ensure they are named correctly.
self.validate_name(field.name, context);
// Ensure the type is an input type
if !is_input_type(&field.type_) {
self.report_error(
SchemaValidationError::InvalidArgumentType(
input_object.name,
field.name,
field.name,
field.type_.clone(),
),
context,
);
}
}
}
fn validate_type_with_interfaces<T: TypeWithFields + Named>(&self, type_: &T) {
let mut interface_names = FnvHashSet::default();
for interface_id in type_.interfaces().iter() {
let interface = self.schema.interface(*interface_id);
if interface_names.contains(&interface.name) {
self.report_error(
SchemaValidationError::DuplicateInterfaceImplementation(
type_.name(),
interface.name,
),
ValidationContextType::TypeNode(type_.name()),
);
continue;
}
interface_names.insert(interface.name);
self.validate_type_implements_interface(type_, interface);
}
}
fn validate_type_implements_interface<T: TypeWithFields + Named>(
&self,
type_: &T,
interface: &Interface,
) {
let object_field_map = self.field_map(type_.fields());
let interface_field_map = self.field_map(&interface.fields);
let context = ValidationContextType::TypeNode(type_.name());
// Assert each interface field is implemented.
for (field_name, interface_field) in interface_field_map {
// Assert interface field exists on object.
if !object_field_map.contains_key(&field_name) {
self.report_error(
SchemaValidationError::InterfaceFieldNotProvided(
interface.name,
field_name,
type_.name(),
),
context,
);
continue;
}
let object_field = object_field_map.get(&field_name).unwrap();
// Assert interface field type is satisfied by object field type, by being
// a valid subtype. (covariant)
if !self
.schema
.is_type_subtype_of(&object_field.type_, &interface_field.type_)
{
self.report_error(
SchemaValidationError::NotASubType(
interface.name,
field_name,
self.schema.get_type_name(interface_field.type_.inner()),
type_.name(),
self.schema.get_type_name(object_field.type_.inner()),
),
context,
);
}
// Assert each interface field arg is implemented.
for interface_argument in interface_field.arguments.iter() {
let object_argument = object_field
.arguments
.iter()
.find(|arg| arg.name == interface_argument.name);
// Assert interface field arg exists on object field.
if object_argument.is_none() {
self.report_error(
SchemaValidationError::InterfaceFieldArgumentNotProvided(
interface.name,
field_name,
interface_argument.name,
type_.name(),
),
context,
);
continue;
}
let object_argument = object_argument.unwrap();
// Assert interface field arg type matches object field arg type.
// (invariant)
// TODO: change to contravariant?
if interface_argument.type_ != object_argument.type_ {
self.report_error(
SchemaValidationError::NotEqualType(
interface.name,
field_name,
interface_argument.name,
self.schema.get_type_name(interface_argument.type_.inner()),
type_.name(),
self.schema.get_type_name(object_argument.type_.inner()),
),
context,
);
}
// TODO: validate default values?
}
// Assert additional arguments must not be required.
for object_argument in object_field.arguments.iter() {
if !interface_field.arguments.contains(object_argument.name)
&& object_argument.type_.is_non_null()
{
self.report_error(
SchemaValidationError::MissingRequiredArgument(
type_.name(),
field_name,
object_argument.name,
interface.name,
),
context,
);
}
}
}
}
fn validate_cyclic_implements_reference(&self, interface: &Interface) -> bool {
for id in interface.interfaces() {
let mut path = Vec::new();
let mut visited = FnvHashSet::default();
if self.has_path(
self.schema.interface(*id),
interface.name,
&mut path,
&mut visited,
) {
self.report_error(
SchemaValidationError::CyclicInterfaceInheritance(format!(
"{}->{}",
path.iter()
.map(|name| name.lookup())
.collect::<Vec<_>>()
.join("->"),
interface.name
)),
ValidationContextType::TypeNode(interface.name),
);
return true;
}
}
false
}
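    /// Depth-first walk over the interface inheritance graph: returns true when
    /// `target` is reachable from `root`. `path` accumulates the visited chain
    /// for the cycle error message and `visited` prevents re-walking shared
    /// parent interfaces.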
fn has_path(
&self,
root: &Interface,
target: StringKey,
path: &mut Vec<StringKey>,
visited: &mut FnvHashSet<StringKey>,
) -> bool {
if visited.contains(&root.name) {
return false;
}
if root.name == target {
return true;
}
path.push(root.name);
visited.insert(root.name);
for id in root.interfaces() {
if self.has_path(self.schema.interface(*id), target, path, visited) {
return true;
}
}
path.remove(path.len() - 1);
false
}
fn validate_name(&self, name: StringKey, context: ValidationContextType) {
let name = name.lookup();
let mut chars = name.chars();
if name.len() > 1 && chars.next() == Some('_') && chars.next() == Some('_') {
self.report_error(
SchemaValidationError::InvalidNamePrefix(name.to_string()),
context,
);
}
if !TYPE_NAME_REGEX.is_match(name) {
self.report_error(
SchemaValidationError::InvalidName(name.to_string()),
context,
);
}
}
fn field_map(&self, fields: &[FieldID]) -> FnvHashMap<StringKey, Field> {
fields
.iter()
.map(|id| self.schema.field(*id))
.map(|field| (field.name.item, field.clone()))
.collect::<FnvHashMap<_, _>>()
}
fn report_error(&self, error: SchemaValidationError, context: ValidationContextType) {
self.errors
.lock()
.unwrap()
.entry(context)
.or_insert_with(Vec::new)
.push(error);
}
fn add_error(&self, error: SchemaValidationError) {
self.report_error(error, ValidationContextType::None);
}
pub fn print_errors(&self) -> String {
let mut builder: String = String::new();
let errors = self.errors.lock().unwrap();
let mut contexts: Vec<_> = errors.keys().collect();
contexts.sort_by_key(|context| context.type_name());
for context in contexts {
match context {
ValidationContextType::None => writeln!(builder, "Errors:").unwrap(),
ValidationContextType::TypeNode(type_name) => writeln!(
builder,
"Type {} with definition:\n\t{}\nhad errors:",
type_name,
print_type(self.schema, self.schema.get_type(*type_name).unwrap()).trim_end()
)
.unwrap(),
ValidationContextType::DirectiveNode(directive_name) => writeln!(
builder,
"Directive {} with definition:\n\t{}\nhad errors:",
directive_name,
print_directive(
self.schema,
self.schema.get_directive(*directive_name).unwrap()
)
.trim_end()
)
.unwrap(),
}
let mut error_strings = errors
.get(context)
.unwrap()
.iter()
.map(|error| format!("\t* {}", error))
.collect::<Vec<_>>();
error_strings.sort();
writeln!(builder, "{}", error_strings.join("\n")).unwrap();
writeln!(builder).unwrap();
}
builder
}
}
fn is_named_type(type_: &Type) -> bool {
type_.is_enum()
|| type_.is_input_type()
|| type_.is_interface()
|| type_.is_object()
|| type_.is_scalar()
|| type_.is_union()
}
fn is_introspection_type(type_: &Type, type_name: StringKey) -> bool {
is_named_type(type_) && INTROSPECTION_TYPES.contains(&type_name)
}
fn is_output_type(type_: &TypeReference) -> bool {
let type_ = type_.inner();
type_.is_enum()
|| type_.is_interface()
|| type_.is_object()
|| type_.is_scalar()
|| type_.is_union()
}
fn is_input_type(type_: &TypeReference) -> bool {
let type_ = type_.inner();
type_.is_enum() || type_.is_input_type() || type_.is_scalar()
}<|fim▁end|>
|
TypeWithFields, UnionID,
};
|
<|file_name|>projectview.js<|end_file_name|><|fim▁begin|>define([], function() {
return Backbone.View.extend({
tagName: "a",
className: "projectlink",
attributes: {
href: "#"
},
template: _.template("<%- name %>"),
events: {<|fim▁hole|> },
initialize: function() {
this.listenTo(this.model, "change:selected", function(m, selected) {
this.$el.toggleClass("selected", selected);
});
this.listenTo(this.model, "change:color", function(m, color) {
this.$el.css("color", color);
});
},
render: function() {
this.$el.html(this.template(this.model.toJSON()));
return this;
},
toggleSelection: function() {
this.model.set("selected", !this.model.get("selected"));
return false;
}
});
});<|fim▁end|>
|
"click": "toggleSelection"
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod finite_state_automaton;<|fim▁hole|><|fim▁end|>
|
pub mod push_down_automaton;
pub mod tree_stack_automaton;
|
<|file_name|>pull-request-review-comment-deleted.event.ts<|end_file_name|><|fim▁begin|>export const pullRequestReviewCommentRemoved = {
'event': 'pull_request_review_comment',
'payload': {
'action': 'deleted',
'comment': {
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/comments/620286741',
'pull_request_review_id': 644659124,
'id': 620286741,
'node_id': 'MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDYyMDI4Njc0MQ==',
'diff_hunk': '@@ -0,0 +1 @@\n+console.log(\'hello world\');',
'path': 'test-event.ts',
'position': 1,
'original_position': 1,
'commit_id': 'b937b9965d0d8fd3f7ecc2cefbc8fba8931a4622',
'original_commit_id': 'b937b9965d0d8fd3f7ecc2cefbc8fba8931a4622',
'user': {
'login': 'jaguarBob',
'id': 25396889,
'node_id': 'MDQ6VXNlcjI1Mzk2ODg5',
'avatar_url': 'https://avatars.githubusercontent.com/u/25396889?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/jaguarBob',
'html_url': 'https://github.com/jaguarBob',
'followers_url': 'https://api.github.com/users/jaguarBob/followers',
'following_url': 'https://api.github.com/users/jaguarBob/following{/other_user}',
'gists_url': 'https://api.github.com/users/jaguarBob/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/jaguarBob/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/jaguarBob/subscriptions',
'organizations_url': 'https://api.github.com/users/jaguarBob/orgs',
'repos_url': 'https://api.github.com/users/jaguarBob/repos',
'events_url': 'https://api.github.com/users/jaguarBob/events{/privacy}',
'received_events_url': 'https://api.github.com/users/jaguarBob/received_events',
'type': 'User',
'site_admin': false
},
'body': 'this is my review comment!\r\n\r\nand now it is edited!',
'created_at': '2021-04-26T13:17:34Z',
'updated_at': '2021-04-26T13:26:59Z',
'html_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1#discussion_r620286741',
'pull_request_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1',
'author_association': 'COLLABORATOR',
'_links': {
'self': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/comments/620286741'
},
'html': {
'href': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1#discussion_r620286741'
},
'pull_request': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1'
}
},
'start_line': null,
'original_start_line': null,
'start_side': null,
'line': 1,
'original_line': 1,
'side': 'RIGHT'
},
'pull_request': {
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1',
'id': 352423539,
'node_id': 'MDExOlB1bGxSZXF1ZXN0MzUyNDIzNTM5',
'html_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1',
'diff_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1.diff',
'patch_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1.patch',
'issue_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/1',
'number': 1,
'state': 'open',
'locked': false,
'title': 'Create test-event.ts - title changed!',
'user': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'body': '',
'created_at': '2019-12-12T13:48:34Z',
'updated_at': '2021-04-26T13:26:59Z',
'closed_at': null,
'merged_at': null,
'merge_commit_sha': '93fc62614a3e073103f638960e9639799d399984',
'assignee': null,
'assignees': [],
'requested_reviewers': [],
'requested_teams': [],
'labels': [
{
'id': 1730326714,
'node_id': 'MDU6TGFiZWwxNzMwMzI2NzE0',
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels/documentation',
'name': 'documentation',
'color': '0075ca',
'default': true,
'description': 'Improvements or additions to documentation'
},
{
'id': 1730326716,
'node_id': 'MDU6TGFiZWwxNzMwMzI2NzE2',
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels/duplicate',
'name': 'duplicate',
'color': 'cfd3d7',
'default': true,
'description': 'This issue or pull request already exists'
},
{
'id': 1730326717,
'node_id': 'MDU6TGFiZWwxNzMwMzI2NzE3',
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels/enhancement',
'name': 'enhancement',
'color': 'a2eeef',
'default': true,
'description': 'New feature or request'
}
],
'milestone': null,
'draft': false,
'commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1/commits',
'review_comments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1/comments',
'review_comment_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/comments{/number}',
'comments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/1/comments',
'statuses_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/statuses/b937b9965d0d8fd3f7ecc2cefbc8fba8931a4622',
'head': {
'label': 'Thatkookooguy:Thatkookooguy-patch-1',
'ref': 'Thatkookooguy-patch-1',
'sha': 'b937b9965d0d8fd3f7ecc2cefbc8fba8931a4622',
'user': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'repo': {
'id': 227616181,
'node_id': 'MDEwOlJlcG9zaXRvcnkyMjc2MTYxODE=',
'name': 'test-new-achievibit-events',
'full_name': 'Thatkookooguy/test-new-achievibit-events',
'private': false,
'owner': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'html_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'description': null,
'fork': false,
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events',
'forks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/forks',
'keys_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/keys{/key_id}',
'collaborators_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/collaborators{/collaborator}',
'teams_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/teams',
'hooks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/hooks',
'issue_events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/events{/number}',
'events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/events',
'assignees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/assignees{/user}',
'branches_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/branches{/branch}',
'tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/tags',
'blobs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/blobs{/sha}',
'git_tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/tags{/sha}',
'git_refs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/refs{/sha}',
'trees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/trees{/sha}',
'statuses_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/statuses/{sha}',
'languages_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/languages',
'stargazers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/stargazers',
'contributors_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contributors',
'subscribers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscribers',
'subscription_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscription',
'commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/commits{/sha}',
'git_commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/commits{/sha}',
'comments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/comments{/number}',
'issue_comment_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/comments{/number}',
'contents_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contents/{+path}',
'compare_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/compare/{base}...{head}',
'merges_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/merges',
'archive_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/{archive_format}{/ref}',
'downloads_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/downloads',
'issues_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues{/number}',
'pulls_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls{/number}',
'milestones_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/milestones{/number}',
'notifications_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/notifications{?since,all,participating}',
'labels_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels{/name}',
'releases_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/releases{/id}',
'deployments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/deployments',
'created_at': '2019-12-12T13:41:55Z',
'updated_at': '2019-12-26T14:00:47Z',
'pushed_at': '2019-12-26T14:00:45Z',
'git_url': 'git://github.com/Thatkookooguy/test-new-achievibit-events.git',
'ssh_url': '[email protected]:Thatkookooguy/test-new-achievibit-events.git',
'clone_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events.git',
'svn_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'homepage': null,
'size': 65,
'stargazers_count': 0,
'watchers_count': 0,
'language': 'HTML',
'has_issues': true,
'has_projects': true,
'has_downloads': true,
'has_wiki': true,
'has_pages': false,
'forks_count': 0,
'mirror_url': null,
'archived': false,
'disabled': false,
'open_issues_count': 2,
'license': null,
'forks': 0,
'open_issues': 2,
'watchers': 0,
'default_branch': 'master',
'allow_squash_merge': true,
'allow_merge_commit': true,
'allow_rebase_merge': true,
'delete_branch_on_merge': false
}
},
'base': {
'label': 'Thatkookooguy:master',
'ref': 'master',
'sha': '1373147968cd5101404f624ef517c12b5c48be24',
'user': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'repo': {
'id': 227616181,
'node_id': 'MDEwOlJlcG9zaXRvcnkyMjc2MTYxODE=',
'name': 'test-new-achievibit-events',
'full_name': 'Thatkookooguy/test-new-achievibit-events',
'private': false,
'owner': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'html_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'description': null,
'fork': false,
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events',
'forks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/forks',
'keys_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/keys{/key_id}',
'collaborators_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/collaborators{/collaborator}',
'teams_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/teams',
'hooks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/hooks',
'issue_events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/events{/number}',
'events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/events',
'assignees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/assignees{/user}',
'branches_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/branches{/branch}',
'tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/tags',
'blobs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/blobs{/sha}',
'git_tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/tags{/sha}',
'git_refs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/refs{/sha}',
'trees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/trees{/sha}',
'statuses_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/statuses/{sha}',
'languages_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/languages',
'stargazers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/stargazers',
'contributors_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contributors',
'subscribers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscribers',
'subscription_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscription',
'commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/commits{/sha}',
'git_commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/commits{/sha}',
'comments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/comments{/number}',
'issue_comment_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/comments{/number}',
'contents_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contents/{+path}',
'compare_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/compare/{base}...{head}',
'merges_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/merges',
'archive_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/{archive_format}{/ref}',
'downloads_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/downloads',
'issues_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues{/number}',
'pulls_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls{/number}',
'milestones_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/milestones{/number}',
'notifications_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/notifications{?since,all,participating}',
'labels_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels{/name}',
'releases_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/releases{/id}',
'deployments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/deployments',
'created_at': '2019-12-12T13:41:55Z',
'updated_at': '2019-12-26T14:00:47Z',
'pushed_at': '2019-12-26T14:00:45Z',
'git_url': 'git://github.com/Thatkookooguy/test-new-achievibit-events.git',
'ssh_url': '[email protected]:Thatkookooguy/test-new-achievibit-events.git',
'clone_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events.git',
'svn_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'homepage': null,
'size': 65,
'stargazers_count': 0,
'watchers_count': 0,
'language': 'HTML',
'has_issues': true,
'has_projects': true,
'has_downloads': true,
'has_wiki': true,
'has_pages': false,
'forks_count': 0,
'mirror_url': null,
'archived': false,
'disabled': false,
'open_issues_count': 2,
'license': null,
'forks': 0,
'open_issues': 2,
'watchers': 0,
'default_branch': 'master',
'allow_squash_merge': true,
'allow_merge_commit': true,
'allow_rebase_merge': true,
'delete_branch_on_merge': false
}
},
'_links': {
'self': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1'
},
'html': {
'href': 'https://github.com/Thatkookooguy/test-new-achievibit-events/pull/1'
},
'issue': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/1'
},
'comments': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/1/comments'
},
'review_comments': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1/comments'
},
'review_comment': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/comments{/number}'
},
'commits': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls/1/commits'
},
'statuses': {
'href': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/statuses/b937b9965d0d8fd3f7ecc2cefbc8fba8931a4622'
}
},
'author_association': 'OWNER',
'auto_merge': null,
'active_lock_reason': null
},
'repository': {
'id': 227616181,
'node_id': 'MDEwOlJlcG9zaXRvcnkyMjc2MTYxODE=',
'name': 'test-new-achievibit-events',
'full_name': 'Thatkookooguy/test-new-achievibit-events',
'private': false,
'owner': {
'login': 'Thatkookooguy',
'id': 10427304,
'node_id': 'MDQ6VXNlcjEwNDI3MzA0',
'avatar_url': 'https://avatars.githubusercontent.com/u/10427304?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/Thatkookooguy',
'html_url': 'https://github.com/Thatkookooguy',
'followers_url': 'https://api.github.com/users/Thatkookooguy/followers',
'following_url': 'https://api.github.com/users/Thatkookooguy/following{/other_user}',
'gists_url': 'https://api.github.com/users/Thatkookooguy/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/Thatkookooguy/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/Thatkookooguy/subscriptions',
'organizations_url': 'https://api.github.com/users/Thatkookooguy/orgs',
'repos_url': 'https://api.github.com/users/Thatkookooguy/repos',
'events_url': 'https://api.github.com/users/Thatkookooguy/events{/privacy}',
'received_events_url': 'https://api.github.com/users/Thatkookooguy/received_events',
'type': 'User',
'site_admin': false
},
'html_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'description': null,
'fork': false,
'url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events',
'forks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/forks',
'keys_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/keys{/key_id}',
'collaborators_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/collaborators{/collaborator}',
'teams_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/teams',
'hooks_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/hooks',
'issue_events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/events{/number}',
'events_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/events',
'assignees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/assignees{/user}',
'branches_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/branches{/branch}',
'tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/tags',
'blobs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/blobs{/sha}',
'git_tags_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/tags{/sha}',
'git_refs_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/refs{/sha}',
'trees_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/trees{/sha}',
'statuses_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/statuses/{sha}',
'languages_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/languages',
'stargazers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/stargazers',
'contributors_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contributors',
'subscribers_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscribers',
'subscription_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/subscription',
'commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/commits{/sha}',
'git_commits_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/git/commits{/sha}',
'comments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/comments{/number}',
'issue_comment_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues/comments{/number}',
'contents_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/contents/{+path}',
'compare_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/compare/{base}...{head}',
'merges_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/merges',
'archive_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/{archive_format}{/ref}',
'downloads_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/downloads',
'issues_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/issues{/number}',
'pulls_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/pulls{/number}',
'milestones_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/milestones{/number}',
'notifications_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/notifications{?since,all,participating}',
'labels_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/labels{/name}',
'releases_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/releases{/id}',
'deployments_url': 'https://api.github.com/repos/Thatkookooguy/test-new-achievibit-events/deployments',
'created_at': '2019-12-12T13:41:55Z',
'updated_at': '2019-12-26T14:00:47Z',
'pushed_at': '2019-12-26T14:00:45Z',
'git_url': 'git://github.com/Thatkookooguy/test-new-achievibit-events.git',
'ssh_url': '[email protected]:Thatkookooguy/test-new-achievibit-events.git',
'clone_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events.git',
'svn_url': 'https://github.com/Thatkookooguy/test-new-achievibit-events',
'homepage': null,
'size': 65,
'stargazers_count': 0,
'watchers_count': 0,
'language': 'HTML',
'has_issues': true,
'has_projects': true,
'has_downloads': true,
'has_wiki': true,
'has_pages': false,
'forks_count': 0,
'mirror_url': null,
'archived': false,
'disabled': false,
'open_issues_count': 2,
'license': null,
'forks': 0,
'open_issues': 2,
'watchers': 0,
'default_branch': 'master'
},
'sender': {
'login': 'jaguarBob',
'id': 25396889,
'node_id': 'MDQ6VXNlcjI1Mzk2ODg5',
'avatar_url': 'https://avatars.githubusercontent.com/u/25396889?v=4',
'gravatar_id': '',
'url': 'https://api.github.com/users/jaguarBob',
'html_url': 'https://github.com/jaguarBob',
'followers_url': 'https://api.github.com/users/jaguarBob/followers',
'following_url': 'https://api.github.com/users/jaguarBob/following{/other_user}',
'gists_url': 'https://api.github.com/users/jaguarBob/gists{/gist_id}',
'starred_url': 'https://api.github.com/users/jaguarBob/starred{/owner}{/repo}',
'subscriptions_url': 'https://api.github.com/users/jaguarBob/subscriptions',
'organizations_url': 'https://api.github.com/users/jaguarBob/orgs',
'repos_url': 'https://api.github.com/users/jaguarBob/repos',
'events_url': 'https://api.github.com/users/jaguarBob/events{/privacy}',
'received_events_url': 'https://api.github.com/users/jaguarBob/received_events',
'type': 'User',
'site_admin': false
}
}
}
|
// file: score.ts
export interface Score {
  name: string;
  score: number;
createdAt: Date;
}
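// A minimal usage sketch (hypothetical values) showing the shape of a Score record:
const exampleScore: Score = {
  name: 'kobi',
  score: 42,
  createdAt: new Date('2019-12-26T14:00:45Z'),
};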
|
// file: construct-string-from-binary-tree.js
/**
 * Definition for a binary tree node.
 * function TreeNode(val) {
 * this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} t
* @return {string}
*/
const tree2str = (t) => {
let ret = '';
if (t) {
ret += t.val;
if (t.left || t.right) {
ret += '(';
ret += tree2str(t.left);
ret += ')';
}
if (t.right) {
ret += `(${tree2str(t.right)})`;
}
}
return ret;
};
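// A small usage sketch (hand-built plain objects) for the classic example tree [1,2,3,4]:
//   const node = (val, left = null, right = null) => ({ val, left, right });
//   const root = node(1, node(2, node(4)), node(3));
//   tree2str(root); // => '1(2(4))(3)'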
|
#!/usr/bin/python
# file: baserunner_test.py
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: [email protected] (Mingyu Wu)
"""Unittest for baserunner module."""
__author__ = '[email protected] (Mingyu Wu)'
import os
import shutil
import sys
import tempfile
import time
import unittest
from lib import baserunner
from lib import filesystemhandlerextend
from lib import mock_emailmessage
from lib import mock_reporter
from lib import mock_scanscripts
from lib import pyreringconfig
from lib import pyreringutil
global_settings = pyreringconfig.GlobalPyreRingConfig.settings
class BaseRunnerTest(unittest.TestCase):
  """Unit test cases for BaseRunner class."""
def setUp(self):
    # I should configure global_settings here instead of reading it from the file system.
self.tempdir = tempfile.mkdtemp()
root_dir = os.path.abspath(os.path.join(os.path.split(sys.argv[0])[0],
'../'))
global_settings.update(
{'report_dir': os.path.join(self.tempdir, 'report'),
'email_recipients': os.getenv('LOGNAME'),
'host_name': 'test.host',
'log_file': 'pyrering.log',
'file_errors': False,
'project_name': 'pyrering_unittest',
'root_dir': root_dir,
'sendmail': False,
'runner': 'baserunner',
'source_dir': os.path.join(root_dir, 'test'),
'tester': os.getenv('LOGNAME'),
'FATAL_STRING': 'Fatal:',
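         # the FATAL_STRING marker above forces a test failure when it appears
         # in test output, even on exit code 0 (see testCatchFatalMessage)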
'header_file': 'header_info.txt',
'time': time.strftime('%Y%m%d%H%M'),
'skip_setup': False,
})
# get a default config and mocks
self.one_config = pyreringutil.PRConfigParser().Default()
self.scanner = mock_scanscripts.MockScanScripts()
self.emailmessage = mock_emailmessage.MockEmailMessage()
self.reporter = mock_reporter.MockTxtReporter()
self.runner = baserunner.BaseRunner(
name='test',
scanner=self.scanner,
email_message=self.emailmessage,
filesystem=filesystemhandlerextend.FileSystemHandlerExtend(),
reporter=self.reporter)
self.runner.Prepare()
if not os.path.isdir(global_settings['report_dir']):
os.makedirs(global_settings['report_dir'])
# I don't want the unit test to mess with the original log file.
global_settings['log_file'] += '.unittest'
def tearDown(self):
self.runner.CleanUp()
self.runner = ''
pyreringconfig.Reset()
self.scanner.CleanConfig()
shutil.rmtree(self.tempdir)
def testFindHeaderInfoFile(self):
global_settings['header_file'] = os.path.join(self.tempdir, 'header.txt')
fh = open(global_settings['header_file'], 'w')
fh.write('test info')
fh.close()
self.one_config['TEST_SCRIPT'] = 'echo 1'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testFindHeaderInfoFile'], False)
self.assertEqual(self.reporter.header, 'test info')
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
# Positive Test Cases:
def testOneCommand(self):
"""A simple sleep command takes some time to finish."""
# prepare the test script here
self.one_config['TEST_SCRIPT'] = 'sleep 3'
# set the mock scanscript to return this thing.
self.scanner.SetConfig([self.one_config])
# now run the test and return should be expected.
result = self.runner.Run(['testOneCommand'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
def testEchoCommand(self):
"""A simple command has output on stdout."""
self.one_config['TEST_SCRIPT'] = 'echo testEchoCommand'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testEchoCommand'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
#TODO(mwu): need to check the log file has this hello line
def testEchoToSTDERRCommand(self):
"""A simple command has output redirect to stderr."""
self.one_config['TEST_SCRIPT'] = 'echo testEchoToSTDERRCommand >&2'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testEchoSTDERRCommand'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
#TODO(mwu): need to check the log file has this hello line
def testRunScript(self):
"""A real script to run."""
self.one_config['TEST_SCRIPT'] = os.path.join(global_settings['root_dir'],
'test/test1_echo.sh')
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testRunScript'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
#TODO(mwu): need to check the log file has the echo output
def testRunScripts(self):
"""2 scripts to be run."""
self.one_config['TEST_SCRIPT'] = 'echo testRunScripts1'
config2 = pyreringutil.PRConfigParser().Default()
config2['TEST_SCRIPT'] = 'echo testRunScripts2'
self.scanner.SetConfig([self.one_config, config2])
result = self.runner.Run(['testRunScripts'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 2)
# TODO(mwu): verify both scripts run fine
def testEmailSend(self):
"""Test Email should be send."""
self.one_config['TEST_SCRIPT'] = 'echo send_email_test;exit 1'
self.scanner.SetConfig([self.one_config])
try:
self.runner.Run(['testEmailSend'], True)
except self.emailmessage.EmailCalledError:
self.assertTrue(True)
else:
self.fail(msg='Send email was not called')
def testEmailNotSendIfTestPass(self):
"""Test email should not go if all tests pass."""
self.one_config['TEST_SCRIPT'] = 'echo send_email_test;exit 0'
self.scanner.SetConfig([self.one_config])
try:
self.runner.Run(['testEmailSend'], True)
except self.emailmessage.EmailCalledError:
self.fail()
# Negative Test Cases
def testTimeoutCommand(self):
"""A command times out."""
self.one_config['TEST_SCRIPT'] = 'echo timeouttest; sleep 8'
self.one_config['TIMEOUT'] = 2
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testTimeoutCommand'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.timeout, 1)
def testNonExistCommand(self):
"""Test a wrong system command."""
self.one_config['TEST_SCRIPT'] = 'nonexist_command'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testNonExistCommand'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 1)
def testNonExistScript(self):
"""Test a nonexist script."""
self.one_config['TEST_SCRIPT'] = '/tmp/nonexist_script.sh'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testNonExistScript'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 1)
def testPermissionDenied(self):
"""Test something without permission."""
self.one_config['TEST_SCRIPT'] = 'touch /pyrering.txt'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testPermissionDenied'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 1)
def testCatchWarningMessage(self):
"""Test a command has warning output."""
self.one_config['TEST_SCRIPT'] = 'echo warn message'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testCatchWarningMessage'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
def testCatchFatalMessage(self):
"""Test a command has fatal error message even exit code still 0."""
self.one_config['TEST_SCRIPT'] = 'echo Fatal:;echo anotherline'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testCatchFatalMessage'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 1)
def testOutputLargeMessage(self):
"""Test a test can have large screen output.
As default the stdout only has a 4k buffer limit, so the code should clean
up the buffer while running the test, otherwise the writing to buffer will
be blocked when the buffer is full.
"""
self.one_config['TEST_SCRIPT'] = os.path.join(global_settings['root_dir'],
'test/outputlargetxt.py')
self.one_config['TIMEOUT'] = 4
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testLargeOutput'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.passed, 1)
def testExitWithError(self):
"""Test a test have an error exit, which is not a failure."""
self.one_config['TEST_SCRIPT'] = 'exit 255'
self.scanner.SetConfig([self.one_config])
result = self.runner.Run(['testExitWithError'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 0)
self.assertEqual(self.runner.error, 1)
def testSetupTestPassed(self):
"""Test a setup test case passes."""
self.one_config['TEST_SCRIPT'] = 'exit 0'
self.scanner.SetConfig([self.one_config])
config2 = pyreringutil.PRConfigParser().Default()
config2['TEST_SCRIPT'] = 'exit 0'
self.scanner.SetConfig([config2], 'setup')
result = self.runner.Run(['testSetupTestFailed'], False)
self.assertEqual(result, 0)
self.assertEqual(self.runner.failed, 0)
def testSetupTestFailed(self):
"""Test a setup test case failed, the test should exit at once."""
self.one_config['TEST_SCRIPT'] = 'exit 0'
self.scanner.SetConfig([self.one_config])
config2 = pyreringutil.PRConfigParser().Default()
config2['TEST_SCRIPT'] = 'exit 1'
self.scanner.SetConfig([config2], 'setup')
result = self.runner.Run(['testSetupTestFailed'], False)
self.assertEqual(result, 1)
self.assertEqual(self.runner.failed, 1)
def testTearDownFailed(self):
"""Test a teardown test case failed, the test still reports."""
self.one_config['TEST_SCRIPT'] = 'exit 0'
self.scanner.SetConfig([self.one_config])
config2 = pyreringutil.PRConfigParser().Default()
config2['TEST_SCRIPT'] = 'exit 1'
self.scanner.SetConfig([config2], 'teardown')
result = self.runner.Run(['testTearDownTestFailed'], False)
self.assertEqual(result, 4)
self.assertEqual(self.runner.failed, 4)
if __name__ == '__main__':
  unittest.main()
|
#!/usr/bin/env python
# file: acfun.py
__all__ = ['acfun_download']
from ..common import *
from .letv import letvcloud_download_by_vu
from .qq import qq_download_by_vid
from .sina import sina_download_by_vid
from .tudou import tudou_download_by_iid
from .youku import youku_download_by_vid
import json, re
def get_srt_json(id):
url = 'http://danmu.aixifan.com/V2/%s' % id
return get_html(url)
def acfun_download_by_vid(vid, title, output_dir='.', merge=True, info_only=False, **kwargs):
info = json.loads(get_html('http://www.acfun.tv/video/getVideo.aspx?id=' + vid))
sourceType = info['sourceType']
if 'sourceId' in info: sourceId = info['sourceId']
# danmakuId = info['danmakuId']
if sourceType == 'sina':
sina_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'youku':
youku_download_by_vid(sourceId, title=title, output_dir=output_dir, merge=merge, info_only=info_only, **kwargs)
elif sourceType == 'tudou':
tudou_download_by_iid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'qq':
qq_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'letv':
letvcloud_download_by_vu(sourceId, '2d8c027396', title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'zhuzhan':
a = 'http://api.aixifan.com/plays/%s/realSource' % vid
s = json.loads(get_content(a, headers={'deviceType': '1'}))
urls = s['data']['files'][-1]['url']
size = urls_size(urls)
print_info(site_info, title, 'mp4', size)
if not info_only:
download_urls(urls, title, 'mp4', size,
output_dir=output_dir, merge=merge)
else:
raise NotImplementedError(sourceType)
if not info_only and not dry_run:
if not kwargs['caption']:
print('Skipping danmaku.')
return
try:
title = get_filename(title)
print('Downloading %s ...\n' % (title + '.cmt.json'))
cmt = get_srt_json(vid)
with open(os.path.join(output_dir, title + '.cmt.json'), 'w', encoding='utf-8') as x:
x.write(cmt)
except:
pass
def acfun_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
assert re.match(r'http://[^\.]+.acfun.[^\.]+/\D/\D\D(\d+)', url)
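    # e.g. a matching page URL (hypothetical video id): http://www.acfun.tv/v/ac1234567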
html = get_html(url)
title = r1(r'<h1 id="txt-title-view">([^<>]+)<', html)
title = unescape_html(title)
title = escape_file_path(title)
assert title
videos = re.findall("data-vid=\"(\d+)\".*href=\"[^\"]+\".*title=\"([^\"]+)\"", html)
for video in videos:
p_vid = video[0]
        p_title = title + " - " + video[1] if video[1] != '删除标签' else title  # '删除标签' means 'deleted tag'
acfun_download_by_vid(p_vid, p_title,
output_dir=output_dir,
merge=merge,
info_only=info_only,
**kwargs)
site_info = "AcFun.tv"
download = acfun_download
download_playlist = playlist_not_supported('acfun')
|
# file: migrate_analytics.py
# A script to migrate old keen analytics to a new collection, generate in-between points for choppy
# data, or a little of both
import os
import csv
import copy
import pytz
import logging
import argparse
import datetime
from dateutil.parser import parse
from keen.client import KeenClient
from website.settings import KEEN as keen_settings
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
VERY_LONG_TIMEFRAME = 'this_20_years'
def parse_args():
parser = argparse.ArgumentParser(
description='Enter a start date and end date to gather, smooth, and send back analytics for keen'
)
parser.add_argument('-s', '--start', dest='start_date')
parser.add_argument('-e', '--end', dest='end_date')
parser.add_argument('-t', '--transfer', dest='transfer_collection', action='store_true')
parser.add_argument('-sc', '--source', dest='source_collection')
parser.add_argument('-dc', '--destination', dest='destination_collection')
parser.add_argument('-sm', '--smooth', dest='smooth_events', action='store_true')
parser.add_argument('-o', '--old', dest='old_analytics', action='store_true')
parser.add_argument('-d', '--dry', dest='dry', action='store_true')
parser.add_argument('-r', '--reverse', dest='reverse', action='store_true')
parser.add_argument('-re', '--removeevent', dest='remove_event')
parsed = parser.parse_args()
validate_args(parsed)
return parsed
def validate_args(args):
""" Go through supplied command line args an determine if you have enough to continue
:param args: argparse args object, to sift through and figure out if you need more info
:return: None, just raise errors if it finds something wrong
"""
if args.dry:
logger.info('Running analytics on DRY RUN mode! No data will actually be sent to Keen.')
potential_operations = [args.smooth_events, args.transfer_collection, args.old_analytics]
if len([arg for arg in potential_operations if arg]) > 1:
raise ValueError('You may only choose one analytic type to run: transfer, smooth, or import old analytics.')
if args.smooth_events and not (args.start_date and args.end_date):
raise ValueError('To smooth data, please enter both a start date and end date.')
if args.start_date and args.end_date:
if parse(args.start_date) > parse(args.end_date):
raise ValueError('Please enter an end date that is after the start date.')
if args.smooth_events and not args.source_collection:
raise ValueError('Please specify a source collection to smooth data from.')
if args.transfer_collection and not (args.source_collection and args.destination_collection):
raise ValueError('To transfer between keen collections, enter both a source and a destination collection.')
if any([args.start_date, args.end_date]) and not all([args.start_date, args.end_date]):
raise ValueError('You must provide both a start and an end date if you provide either.')
if args.remove_event and not args.source_collection:
        raise ValueError('You must provide a source collection to remove an event from.')
def fill_in_event_gaps(collection_name, events):
""" A method to help fill in gaps between events that might be far apart,
so that one event happens per day.
:param collection_name: keen collection events are from
:param events: events to fill in gaps between
:return: list of "generated and estimated" events to send that will fill in gaps.
"""
given_days = [parse(event['keen']['timestamp']).date() for event in events if not event.get('generated')]
given_days.sort()
date_chunks = [given_days[x-1:x+1] for x in range(1, len(given_days))]
events_to_add = []
if given_days:
if collection_name == 'addon_snapshot':
all_providers = list(set([event['provider']['name'] for event in events]))
for provider in all_providers:
for date_pair in date_chunks:
if date_pair[1] - date_pair[0] > datetime.timedelta(1) and date_pair[0] != date_pair[1]:
first_event = [
event for event in events if date_from_event_ts(event) == date_pair[0] and event['provider']['name'] == provider and not event.get('generated')
]
if first_event:
events_to_add += generate_events_between_events(date_pair, first_event[0])
elif collection_name == 'institution_summary':
        all_institutions = list(set([event['institution']['name'] for event in events]))
        for institution in all_institutions:
for date_pair in date_chunks:
if date_pair[1] - date_pair[0] > datetime.timedelta(1) and date_pair[0] != date_pair[1]:
first_event = [
event for event in events if date_from_event_ts(event) == date_pair[0] and event['institution']['name'] == institution and not event.get('generated')
]
if first_event:
events_to_add += generate_events_between_events(date_pair, first_event[0])
else:
for date_pair in date_chunks:
if date_pair[1] - date_pair[0] > datetime.timedelta(1) and date_pair[0] != date_pair[1]:
first_event = [event for event in events if date_from_event_ts(event) == date_pair[0] and not event.get('generated')]
if first_event:
events_to_add += generate_events_between_events(date_pair, first_event[0])
logger.info('Generated {} events to add to the {} collection.'.format(len(events_to_add), collection_name))
else:
logger.info('Could not retrieve events for the date range you provided.')
return events_to_add
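# Example (hypothetical data): given real events on 2016-11-01 and 2016-11-04,
# fill_in_event_gaps() emits copies of the 11-01 event stamped 11-02 and 11-03,
# so downstream charts see one point per day.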
def date_from_event_ts(event):
return parse(event['keen']['timestamp']).date()
def generate_events_between_events(given_days, first_event):
first_day = given_days[0]
last_day = given_days[-1]
next_day = first_day + datetime.timedelta(1)
first_event['keen'].pop('created_at')
first_event['keen'].pop('id')
first_event['generated'] = True # Add value to tag generated data
generated_events = []
while next_day < last_day:
new_event = copy.deepcopy(first_event)
new_event['keen']['timestamp'] = datetime.datetime(next_day.year, next_day.month, next_day.day).replace(tzinfo=pytz.UTC).isoformat()
if next_day not in given_days:
generated_events.append(new_event)
next_day += datetime.timedelta(1)
if generated_events:
logger.info('Generated {} events for the interval {} to {}'.format(
len(generated_events),
given_days[0].isoformat(),
given_days[1].isoformat()
)
)
return generated_events
def get_keen_client():
keen_project = keen_settings['private'].get('project_id')
read_key = keen_settings['private'].get('read_key')
master_key = keen_settings['private'].get('master_key')
write_key = keen_settings['private'].get('write_key')
if keen_project and read_key and master_key:
client = KeenClient(
project_id=keen_project,
read_key=read_key,
master_key=master_key,
write_key=write_key
)
else:
raise ValueError('Cannot connect to Keen clients - all keys not provided.')
return client
def extract_events_from_keen(client, event_collection, start_date=None, end_date=None):
""" Get analytics from keen to use as a starting point for smoothing or transferring
:param client: keen client to use for connection
:param start_date: datetime object, datetime to start gathering from keen
:param end_date: datetime object, datetime to stop gathering from keen
:param event_collection: str, name of the event collection to gather from
:return: a list of keen events to use in other methods
"""
timeframe = VERY_LONG_TIMEFRAME
if start_date and end_date:
logger.info('Gathering events from the {} collection between {} and {}'.format(event_collection, start_date, end_date))
timeframe = {'start': start_date.isoformat(), 'end': end_date.isoformat()}
else:
logger.info('Gathering events from the {} collection using timeframe {}'.format(event_collection, VERY_LONG_TIMEFRAME))
    return client.extraction(event_collection, timeframe=timeframe)
def make_sure_keen_schemas_match(source_collection, destination_collection, keen_client):
""" Helper function to check if two given collections have matching schemas in keen, to make sure
they can be transfered between one another
:param source_collection: str, collection that events are stored now
:param destination_collection: str, collection to transfer to
:param keen_client: KeenClient, instantiated for the connection
:return: bool, if the two schemas match in keen
"""
source_schema = keen_client.get_collection(source_collection)
destination_schema = keen_client.get_collection(destination_collection)
return source_schema == destination_schema
def transfer_events_to_another_collection(client, source_collection, destination_collection, dry, reverse=False):
""" Transfer analytics from source collection to the destination collection.
Will only work if the source and destination have the same schemas attached, will error if they don't
:param client: KeenClient, client to use to make connection to keen
:param source_collection: str, keen collection to transfer from
:param destination_collection: str, keen collection to transfer to
:param dry: bool, whether or not to make a dry run, aka actually send events to keen
:return: None
"""
schemas_match = make_sure_keen_schemas_match(source_collection, destination_collection, client)
if not schemas_match:
raise ValueError('The two provided schemas in keen do not match, you will need to do a bit more work.')
events_from_source = extract_events_from_keen(client, source_collection)
for event in events_from_source:
event['keen'].pop('created_at')
event['keen'].pop('id')
if reverse:
remove_events_from_keen(client, destination_collection, events_from_source, dry)
else:
add_events_to_keen(client, destination_collection, events_from_source, dry)
logger.info(
'Transferred {} events from the {} collection to the {} collection'.format(
len(events_from_source),
source_collection,
destination_collection
)
)
def add_events_to_keen(client, collection, events, dry):
logger.info('Adding {} events to the {} collection...'.format(len(events), collection))
if not dry:
client.add_events({collection: events})
def smooth_events_in_keen(client, source_collection, start_date, end_date, dry, reverse):
base_events = extract_events_from_keen(client, source_collection, start_date, end_date)
events_to_fill_in = fill_in_event_gaps(source_collection, base_events)
if reverse:
remove_events_from_keen(client, source_collection, events_to_fill_in, dry)
else:
add_events_to_keen(client, source_collection, events_to_fill_in, dry)
def remove_events_from_keen(client, source_collection, events, dry):
for event in events:
filters = [{'property_name': 'keen.timestamp', 'operator': 'eq', 'property_value': event['keen']['timestamp']}]
# test to see if you get back the correct events from keen
filtered_event = client.extraction(source_collection, filters=filters)
if filtered_event:
filtered_event = filtered_event[0]
filtered_event['keen'].pop('id')
filtered_event['keen'].pop('created_at')
filtered_event['keen']['timestamp'] = filtered_event['keen']['timestamp'][:10] # ends of timestamps differ
event['keen']['timestamp'] = event['keen']['timestamp'][:10]
if event != filtered_event:
logger.error('Filtered event not equal to the event you have gathered, not removing...')
else:
logger.info('About to delete a generated event from the {} collection from the date {}'.format(
source_collection, event['keen']['timestamp']
))
if not dry:
client.delete_events(source_collection, filters=filters)
else:
logger.info('No filtered event found.')
def import_old_events_from_spreadsheet():
home = os.path.expanduser('~')
spreadsheet_path = home + '/daily_user_counts.csv'
key_map = {
'active-users': 'active',
'logs-gte-11-total': 'depth',
'number_users': 'total_users', # really is active - number_users
'number_projects': 'projects.total',
'number_projects_public': 'projects.public',
'number_projects_registered': 'registrations.total',
'Date': 'timestamp',
'dropbox-users-enabled': 'enabled',
'dropbox-users-authorized': 'authorized',
'dropbox-users-linked': 'linked',
'profile-edits': 'profile_edited'
}
with open(spreadsheet_path) as csvfile:
reader = csv.reader(csvfile, delimiter=',')
col_names = next(reader)
dictReader = csv.DictReader(open(spreadsheet_path, 'rb'), fieldnames=col_names, delimiter=',')
events = []
for row in dictReader:
event = {}
for key in row:
equiv_key = key_map.get(key, None)
if equiv_key:
event[equiv_key] = row[key]
events.append(event)
user_summary_cols = ['active', 'depth', 'total_users', 'timestamp', 'profile_edited']
node_summary_cols = ['registrations.total', 'projects.total', 'projects.public', 'timestamp']
addon_summary_cols = ['enabled', 'authorized', 'linked', 'timestamp']
user_events = []
node_events = []
addon_events = []
for event in events[3:]: # The first few rows have blank and/or bad data because they're extra headers
node_event = {}
user_event = {}
addon_event = {}
for key, value in event.items():
if key in node_summary_cols:
node_event[key] = value
if key in user_summary_cols:
user_event[key] = value
if key in addon_summary_cols:
addon_event[key] = value
formatted_user_event = format_event(user_event, analytics_type='user')
formatted_node_event = format_event(node_event, analytics_type='node')
formatted_addon_event = format_event(addon_event, analytics_type='addon')
if formatted_node_event:
node_events.append(formatted_node_event)
if formatted_user_event:
user_events.append(formatted_user_event)
if formatted_addon_event:
addon_events.append(formatted_addon_event)
logger.info(
'Gathered {} old user events, {} old node events and {} old dropbox addon events for keen'.format(
len(user_events),
len(node_events),
len(addon_events)
)
)
return {'user_summary': user_events, 'node_summary': node_events, 'addon_snapshot': addon_events}
def comma_int(value):
if value and value != 'MISSING':
return int(value.replace(',', ''))
def format_event(event, analytics_type):
user_event_template = {
'status': {},
'keen': {}
}
node_event_template = {
'projects': {},
'registered_projects': {},
'keen': {}
}
addon_event_template = {
'keen': {},
'users': {}
}
template_to_use = None
if analytics_type == 'user':
template_to_use = user_event_template
if event['active'] and event['active'] != 'MISSING':
template_to_use['status']['active'] = comma_int(event['active'])
if event['total_users'] and event['active']:
template_to_use['status']['unconfirmed'] = comma_int(event['total_users']) - comma_int(event['active'])
if event['profile_edited']:
template_to_use['status']['profile_edited'] = comma_int(event['profile_edited'])
elif analytics_type == 'node':
template_to_use = node_event_template
if event['projects.total']:
template_to_use['projects']['total'] = comma_int(event['projects.total'])
if event['projects.public']:
template_to_use['projects']['public'] = comma_int(event['projects.public'])
if event['registrations.total']:
template_to_use['registered_projects']['total'] = comma_int(event['registrations.total'])
if event['projects.total'] and event['projects.public']:
template_to_use['projects']['private'] = template_to_use['projects']['total'] - template_to_use['projects']['public']
elif analytics_type == 'addon':
template_to_use = addon_event_template
if event['enabled']:
template_to_use['users']['enabled'] = comma_int(event['enabled'])
if event['authorized']:
template_to_use['users']['authorized'] = comma_int(event['authorized'])
if event['linked']:
template_to_use['users']['linked'] = comma_int(event['linked'])
if event['authorized'] or event['enabled'] or event['linked']:
template_to_use['provider'] = {'name': 'dropbox'}
template_to_use['keen']['timestamp'] = parse(event['timestamp']).replace(hour=12, tzinfo=pytz.UTC).isoformat()
template_to_use['imported'] = True
formatted_event = {key: value for key, value in template_to_use.items() if value}
if len(formatted_event.items()) > 2: # if there's more than just the auto-added timestamp for keen
return template_to_use
def remove_event_from_keen(client, source_collection, event_id):
filters = [{'property_name': 'keen.id', 'operator': 'eq', 'property_value': event_id}]
client.delete_events(source_collection, filters=filters)
def parse_and_send_old_events_to_keen(client, dry, reverse):
old_events = import_old_events_from_spreadsheet()
for key, value in old_events.items():
if reverse:
remove_events_from_keen(client, key, value, dry)
else:
add_events_to_keen(client, key, value, dry)
def main():
""" Main function for moving around and adjusting analytics gotten from keen and sending them back to keen.
Usage:
* Transfer all events from the 'institution_analytics' to the 'institution_summary' collection:
`python -m scripts.analytics.migrate_analytics -d -t -sc institution_analytics -dc institution_summary`
* Fill in the gaps in analytics for the 'addon_snapshot' collection between 2016-11-01 and 2016-11-15:
`python -m scripts.analytics.migrate_analytics -d -sm -sc addon_snapshot -s 2016-11-01 -e 2016-11-15`
* Reverse the above action by adding -r:
`python -m scripts.analytics.migrate_analytics -d -sm -sc addon_snapshot -s 2016-11-01 -e 2016-11-15 -r`
* Parse old analytics from the old analytics CSV stored on your filesystem:
`python -m scripts.analytics.migrate_analytics -o -d`
"""
args = parse_args()
client = get_keen_client()
dry = args.dry
reverse = args.reverse
if args.remove_event:
remove_event_from_keen(client, args.source_collection, args.remove_event)
if args.smooth_events:
smooth_events_in_keen(client, args.source_collection, parse(args.start_date), parse(args.end_date), dry, reverse)
elif args.transfer_collection:
transfer_events_to_another_collection(client, args.source_collection, args.destination_collection, dry, reverse)
elif args.old_analytics:
parse_and_send_old_events_to_keen(client, dry, reverse)
if __name__ == '__main__':
    main()
|
// file: s3.go
package s3
import (
"bytes"
"context"
"errors"
"io"
"os"
"path/filepath"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/go-spatial/tegola"
"github.com/go-spatial/tegola/cache"
"github.com/go-spatial/tegola/dict"
"github.com/go-spatial/tegola/mvt"
)
var (
ErrMissingBucket = errors.New("s3cache: missing required param 'bucket'")
)
const CacheType = "s3"
const (
// required
ConfigKeyBucket = "bucket"
// optional
ConfigKeyBasepath = "basepath"
ConfigKeyMaxZoom = "max_zoom"
ConfigKeyRegion = "region" // defaults to "us-east-1"
ConfigKeyEndpoint = "endpoint" // defaults to ""
ConfigKeyAWSAccessKeyID = "aws_access_key_id"
ConfigKeyAWSSecretKey = "aws_secret_access_key"
ConfigKeyACL = "access_control_list" // defaults to ""
ConfigKeyCacheControl = "cache_control" // defaults to ""
ConfigKeyContentType = "content_type" // defaults to "application/vnd.mapbox-vector-tile"
)
const (
DefaultBasepath = ""
DefaultRegion = "us-east-1"
DefaultAccessKey = ""
DefaultSecretKey = ""
DefaultContentType = mvt.MimeType
DefaultEndpoint = ""
)
// testData is used during New() to confirm the ability to write, read and purge the cache
var testData = []byte{0x1f, 0x8b, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0x2a, 0xce, 0xcc, 0x49, 0x2c, 0x6, 0x4, 0x0, 0x0, 0xff, 0xff, 0xaf, 0x9d, 0x59, 0xca, 0x5, 0x0, 0x0, 0x0}
func init() {
cache.Register(CacheType, New)
}
// New instantiates a S3 cache. The config expects the following params:
//
// required:
// bucket (string): the name of the s3 bucket to write to
//
// optional:
// region (string): the AWS region the bucket is located. defaults to 'us-east-1'
// aws_access_key_id (string): an AWS access key id
// aws_secret_access_key (string): an AWS secret access key
// basepath (string): a path prefix added to all cache operations inside of the S3 bucket
// max_zoom (int): max zoom to use the cache. beyond this zoom cache Set() calls will be ignored
// endpoint (string): the endpoint where the S3 compliant backend is located. only necessary for non-AWS deployments. defaults to ''
// access_control_list (string): the S3 access control to set on the file when putting the file. defaults to ''.
// cache_control (string): the http cache-control header to set on the file when putting the file. defaults to ''.
// content_type (string): the http MIME-type set on the file when putting the file. defaults to 'application/vnd.mapbox-vector-tile'.
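//
// An example tegola config snippet wiring this cache up (hypothetical values):
//
//	[cache]
//	type = "s3"
//	bucket = "tegola-tiles"
//	region = "us-west-2"
//	basepath = "osm"
//	max_zoom = 14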
func New(config dict.Dicter) (cache.Interface, error) {
var err error
s3cache := Cache{}
// the config map's underlying value is int
defaultMaxZoom := uint(tegola.MaxZ)
maxZoom, err := config.Uint(ConfigKeyMaxZoom, &defaultMaxZoom)
if err != nil {
return nil, err
}
s3cache.MaxZoom = maxZoom
s3cache.Bucket, err = config.String(ConfigKeyBucket, nil)
if err != nil {
return nil, ErrMissingBucket
}
if s3cache.Bucket == "" {
return nil, ErrMissingBucket
}
// basepath
basepath := DefaultBasepath
s3cache.Basepath, err = config.String(ConfigKeyBasepath, &basepath)
if err != nil {
return nil, err
}
// check for region env var
region := os.Getenv("AWS_REGION")
if region == "" {
region = DefaultRegion
}
region, err = config.String(ConfigKeyRegion, ®ion)
if err != nil {
return nil, err
}
accessKey := DefaultAccessKey
accessKey, err = config.String(ConfigKeyAWSAccessKeyID, &accessKey)
if err != nil {
return nil, err
}
secretKey := DefaultSecretKey
secretKey, err = config.String(ConfigKeyAWSSecretKey, &secretKey)
if err != nil {
return nil, err
}
awsConfig := aws.Config{
Region: aws.String(region),
}
// check for endpoint env var
endpoint := os.Getenv("AWS_ENDPOINT")
if endpoint == "" {
endpoint = DefaultEndpoint
}
endpoint, err = config.String(ConfigKeyEndpoint, &endpoint)
if err != nil {
return nil, err
}
// support for static credentials, this is not recommended by AWS but
// necessary for some environments
if accessKey != "" && secretKey != "" {
awsConfig.Credentials = credentials.NewStaticCredentials(accessKey, secretKey, "")
}
// if an endpoint is set, add it to the awsConfig
// otherwise do not set it and it will automatically use the correct aws-s3 endpoint
if endpoint != "" {
awsConfig.Endpoint = aws.String(endpoint)
}
// setup the s3 session.
	// if the accessKey and secretKey are not provided (static creds) then the provider chain is used
// http://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html
s3cache.Client = s3.New(
session.New(&awsConfig),
)
// check for control_access_list env var
acl := os.Getenv("AWS_ACL")
acl, err = config.String(ConfigKeyACL, &acl)
if err != nil {
return nil, err
}
s3cache.ACL = acl
// check for cache_control env var
cachecontrol := os.Getenv("AWS_CacheControl")
cachecontrol, err = config.String(ConfigKeyCacheControl, &cachecontrol)
if err != nil {
return nil, err
}
s3cache.CacheControl = cachecontrol
contenttype := DefaultContentType
contenttype, err = config.String(ConfigKeyContentType, &contenttype)
if err != nil {
return nil, err
}
s3cache.ContentType = contenttype
// in order to confirm we have the correct permissions on the bucket create a small file
// and test a PUT, GET and DELETE to the bucket
key := cache.Key{
MapName: "tegola-test-map",
LayerName: "test-layer",
Z: 0,
X: 0,
Y: 0,
}
// write gzip encoded test file
if err := s3cache.Set(&key, testData); err != nil {
e := cache.ErrSettingToCache{
CacheType: CacheType,
Err: err,
}
return nil, e
}
// read the test file
_, hit, err := s3cache.Get(&key)
if err != nil {
e := cache.ErrGettingFromCache{
CacheType: CacheType,
Err: err,
}
return nil, e
}
if !hit {
// return an error?
}
// purge the test file
if err := s3cache.Purge(&key); err != nil {
e := cache.ErrPurgingCache{
CacheType: CacheType,
Err: err,
}
return nil, e
}
return &s3cache, nil
}
type Cache struct {
// Bucket is the name of the s3 bucket to operate on
Bucket string
// Basepath is a path prefix added to all cache operations inside of the S3 bucket
// helpful so a bucket does not need to be dedicated to only this cache
Basepath string
// MaxZoom determines the max zoom the cache to persist. Beyond this
// zoom, cache Set() calls will be ignored. This is useful if the cache
// should not be leveraged for higher zooms when data changes often.
MaxZoom uint
// client holds a reference to the s3 client. it's expected the client
// has an active session and read, write, delete permissions have been checked
Client *s3.S3
// ACL is the aws ACL, if the not set it will use the default value for aws.
ACL string
// CacheControl is the http Cache Control header, if the not set it will use the default value for aws.
CacheControl string
// ContentType is MIME content type of the tile. Default is "application/vnd.mapbox-vector-tile"
ContentType string
}
func (s3c *Cache) Set(key *cache.Key, val []byte) error {
var err error
// check for maxzoom
if key.Z > s3c.MaxZoom {
return nil
}
// add our basepath
k := filepath.Join(s3c.Basepath, key.String())
input := s3.PutObjectInput{
Body: aws.ReadSeekCloser(bytes.NewReader(val)),
Bucket: aws.String(s3c.Bucket),
Key: aws.String(k),
ContentType: aws.String(s3c.ContentType),
ContentEncoding: aws.String("gzip"),
}
if s3c.ACL != "" {
input.ACL = aws.String(s3c.ACL)
}
if s3c.CacheControl != "" {
input.CacheControl = aws.String(s3c.CacheControl)
}
_, err = s3c.Client.PutObject(&input)
if err != nil {
return err
}
return nil
}
func (s3c *Cache) Get(key *cache.Key) ([]byte, bool, error) {
var err error
// add our basepath
k := filepath.Join(s3c.Basepath, key.String())
input := s3.GetObjectInput{
Bucket: aws.String(s3c.Bucket),
Key: aws.String(k),
}
	// GetObjectWithContext is used here so the "Accept-Encoding: gzip" header can be added
// without this our gzip response will be decompressed by the underlying transport
result, err := s3c.Client.GetObjectWithContext(context.Background(), &input, func(r *request.Request) {
r.HTTPRequest.Header.Add("Accept-Encoding", "gzip")
})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
case s3.ErrCodeNoSuchKey:
return nil, false, nil
default:
return nil, false, aerr
}
}
return nil, false, err
}
var buf bytes.Buffer
_, err = io.Copy(&buf, result.Body)
if err != nil {
return nil, false, err
}
return buf.Bytes(), true, nil
}
func (s3c *Cache) Purge(key *cache.Key) error {
var err error
// add our basepath
k := filepath.Join(s3c.Basepath, key.String())
input := s3.DeleteObjectInput{
Bucket: aws.String(s3c.Bucket),
Key: aws.String(k),
	}
	_, err = s3c.Client.DeleteObject(&input)
	if err != nil {
return err
}
return nil
}
|
// file: NormalMapDemo.java
// External imports
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.BufferedInputStream;
import javax.imageio.ImageIO;
import javax.swing.*;
// Local imports
import org.j3d.renderer.aviatrix3d.texture.TextureCreateUtils;
/**
* Example application that demonstrates how to use the loader interface
* to load a file into the scene graph.
* <p>
*
* @author Justin Couch
* @version $Revision: 1.1 $
*/
public class NormalMapDemo extends JFrame
implements ActionListener
{
private JFileChooser openDialog;
/** Renderer for the basic image */
private ImageIcon srcIcon;
private JLabel srcLabel;
/** Renderer for the normal map version */
private ImageIcon mapIcon;
private JLabel mapLabel;
/** Utility for munging textures to power of 2 size */
private TextureCreateUtils textureUtils;
public NormalMapDemo()
{
super("Normal map conversion demo");
setSize(1280, 1024);
setLocation(0, 0);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
textureUtils = new TextureCreateUtils();
JPanel p1 = new JPanel(new BorderLayout());
srcIcon = new ImageIcon();
srcLabel = new JLabel();
srcLabel.setVerticalTextPosition(SwingConstants.BOTTOM);
srcLabel.setText("Source Image");
mapIcon = new ImageIcon();
mapLabel = new JLabel();
mapLabel.setVerticalTextPosition(SwingConstants.BOTTOM);
mapLabel.setText("NormalMap Image");
JButton b = new JButton("Open A file");
b.addActionListener(this);
p1.add(b, BorderLayout.SOUTH);
p1.add(srcLabel, BorderLayout.WEST);
p1.add(mapLabel, BorderLayout.EAST);
        getContentPane().add(p1);
    }
    //---------------------------------------------------------------
    // Methods defined by ActionListener
//---------------------------------------------------------------
/**
* Process the action event from the open button
*/
public void actionPerformed(ActionEvent evt)
{
if(openDialog == null)
openDialog = new JFileChooser();
int ret_val = openDialog.showOpenDialog(this);
if(ret_val != JFileChooser.APPROVE_OPTION)
return;
File file = openDialog.getSelectedFile();
try
{
System.out.println("Loading external file: " + file);
FileInputStream is = new FileInputStream(file);
BufferedInputStream stream = new BufferedInputStream(is);
BufferedImage img = ImageIO.read(stream);
if(img == null)
{
System.out.println("Image load barfed");
return;
}
srcIcon.setImage(img);
srcLabel.setIcon(srcIcon);
BufferedImage map_img = textureUtils.createNormalMap(img, null);
mapIcon.setImage(map_img);
mapLabel.setIcon(mapIcon);
}
catch(IOException ioe)
{
System.out.println("crashed " + ioe.getMessage());
ioe.printStackTrace();
}
}
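    // Minimal headless sketch of the same conversion pipeline (file names are
    // illustrative assumptions; createNormalMap(BufferedImage, BufferedImage)
    // is the same utility call used above):
    //
    //   BufferedImage height = ImageIO.read(new File("height.png"));
    //   BufferedImage normals = new TextureCreateUtils().createNormalMap(height, null);
    //   ImageIO.write(normals, "png", new File("normals.png"));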
//---------------------------------------------------------------
// Local methods
//---------------------------------------------------------------
public static void main(String[] args)
{
NormalMapDemo demo = new NormalMapDemo();
demo.setVisible(true);
}
}<|fim▁end|>
|
getContentPane().add(p1);
}
//---------------------------------------------------------------
|
<|file_name|>etheraddress64.hh<|end_file_name|><|fim▁begin|>// -*- related-file-name: "../../lib/etheraddress64.cc" -*-
#ifndef CLICK_ETHERADDRESS64_HH
#define CLICK_ETHERADDRESS64_HH
#include <click/string.hh>
#include <click/glue.hh>
#include <click/type_traits.hh>
#if !CLICK_TOOL
# include <click/nameinfo.hh>
# include <click/standard/addressinfo.hh>
#endif
CLICK_DECLS
class EtherAddress64 {
public:
typedef uninitialized_type uninitialized_t;
/** @brief Construct an EtherAddress64 equal to 00-00-00-00-00-00-00-00. */
inline EtherAddress64() {
_data[0] = _data[1] = _data[2] = _data[3] = 0;
}
    /** @brief Construct an EtherAddress64 from data.
* @param data the address data, in network byte order
*
* The bytes data[0]...data[7] are used to construct the address. */
explicit inline EtherAddress64(const unsigned char *data) {
	memcpy(_data, data, 8);
}
/** @brief Construct an uninitialized EtherAddress64. */
inline EtherAddress64(const uninitialized_type &unused) {
(void) unused;
}
/** @brief Return the broadcast EtherAddress64, FF-FF-FF-FF-FF-FF-FF-FF. */
static EtherAddress64 make_broadcast() {
return EtherAddress64(0xFFFF);
}
typedef bool (EtherAddress64::*unspecified_bool_type)() const;
    /** @brief Return true iff the address is not 00-00-00-00-00-00-00-00. */
    inline operator unspecified_bool_type() const {
	return _data[0] || _data[1] || _data[2] || _data[3] ? &EtherAddress64::is_group : 0;
}
/** @brief Return true iff this address is a group address.
*
* Group addresses have the low-order bit of the first byte set to 1, as
* in 01-00-00-00-00-00-00-00 or 03-00-00-02-04-09-04-02. */
inline bool is_group() const {
return data()[0] & 1;
}
/** @brief Return true iff this address is a "local" address.
*
* Local addresses have the next-to-lowest-order bit of the first byte set
* to 1. */
inline bool is_local() const {
return data()[0] & 2;
}
/** @brief Return true iff this address is the broadcast address.
*
* The Ethernet broadcast address is FF-FF-FF-FF-FF-FF-FF-FF. */
inline bool is_broadcast() const {<|fim▁hole|> }
/** @brief Return a pointer to the address data. */
inline unsigned char *data() {
return reinterpret_cast<unsigned char *>(_data);
}
/** @overload */
inline const unsigned char *data() const {
return reinterpret_cast<const unsigned char *>(_data);
}
/** @brief Return a pointer to the address data, as an array of
* uint16_ts. */
inline const uint16_t *sdata() const {
return _data;
}
/** @brief Hash function. */
inline size_t hashcode() const {
return (_data[2] | ((size_t) _data[1] << 16)) ^ ((size_t) _data[0] << 9);
}
/** @brief Unparse this address into a dash-separated hex String.
*
* Examples include "00-00-00-00-00-00-00-00" and "00-05-4E-50-3C-1A-BB-CC".
*
* @note The IEEE standard for printing Ethernet addresses uses dashes as
* separators, not colons. Use unparse_colon() to unparse into the
* nonstandard colon-separated form. */
inline String unparse() const {
return unparse_dash();
}
/** @brief Unparse this address into a colon-separated hex String.
*
* Examples include "00:00:00:00:00:00:00:00" and "00:05:4E:50:3C:1A:BB:CC".
*
* @note Use unparse() to create the IEEE standard dash-separated form. */
String unparse_colon() const;
/** @brief Unparse this address into a dash-separated hex String.
*
* Examples include "00-00-00-00-00-00-00-00" and "00-05-4E-50-3C-1A-BB-CC".
*
* @note This is the IEEE standard for printing Ethernet addresses.
* @sa unparse_colon */
String unparse_dash() const;
typedef const EtherAddress64 ¶meter_type;
private:
uint16_t _data[4];
EtherAddress64(uint16_t m) {
_data[0] = _data[1] = _data[2] = _data[3] = m;
}
} CLICK_SIZE_PACKED_ATTRIBUTE;
/** @relates EtherAddress64
@brief Compares two EtherAddress64 objects for equality. */
inline bool operator==(const EtherAddress64 &a, const EtherAddress64 &b) {
return (a.sdata()[0] == b.sdata()[0] &&
a.sdata()[1] == b.sdata()[1] &&
a.sdata()[2] == b.sdata()[2] &&
a.sdata()[3] == b.sdata()[3]);
}
/** @relates EtherAddress64
@brief Compares two EtherAddress64 objects for inequality. */
inline bool operator!=(const EtherAddress64 &a, const EtherAddress64 &b) {
return !(a == b);
}
class ArgContext;
class Args;
extern const ArgContext blank_args;
/** @class EtherAddress64Arg
@brief Parser class for Ethernet addresses.
This is the default parser for objects of EtherAddress64 type. For 8-byte
arrays like "click_ether::ether_shost" and "click_ether::ether_dhost", you
must pass an EtherAddressArg() explicitly:
@code
struct click_ether ethh;
... Args(...) ...
.read_mp("SRC", EtherAddressArg(), ethh.ether_shost)
...
@endcode */
class EtherAddress64Arg {
public:
typedef void enable_direct_parse;
static bool parse(const String &str, EtherAddress64 &value,
const ArgContext &args = blank_args);
static bool parse(const String &str, unsigned char *value,
const ArgContext &args = blank_args) {
return parse(str, *reinterpret_cast<EtherAddress64 *>(value), args);
}
static bool direct_parse(const String &str, EtherAddress64 &value,
Args &args);
static bool direct_parse(const String &str, unsigned char *value,
Args &args) {
return direct_parse(str, *reinterpret_cast<EtherAddress64 *>(value),
args);
}
};
template<> struct DefaultArg<EtherAddress64> : public EtherAddress64Arg {};
template<> struct has_trivial_copy<EtherAddress64> : public true_type {};
CLICK_ENDDECLS
#endif<|fim▁end|>
|
	return _data[0] + _data[1] + _data[2] + _data[3] == 0x3FFFC; // 4 * 0xFFFF
|
<|file_name|>ffi.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 Maciej Piechotka
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
use std::libc::{c_char, c_int, c_long, c_uchar, c_ushort, c_void};
pub type xmlChar = c_uchar;
#[deriving(ToStr)]
#[repr(C)]
pub enum xmlElementType {
ElementNode = 1,
AttributeNode = 2,
TextNode = 3,
CDataSectionNode = 4,
EntityRefNode = 5,
EntityNode = 6,
PINode = 7,
CommentNode = 8,
DocumentNode = 9,
DocumentTypeNode = 10,
DocumentFragNode = 11,
NotationNode = 12,
HtmlDocumentNode = 13,
DTDNode = 14,
ElementDecl = 15,
AttributeDecl = 16,
EntityDecl = 17,
NamespaceDecl = 18,
XIncludeStart = 19,
XIncludeEnd = 20,
DOCBDocumentNode = 21
}
pub struct xmlAttr {
_private: *c_void,
_type: xmlElementType, // AttributeNode
name: *c_char,
children: *xmlNode,
last: *xmlNode,
parent: *xmlNode,
next: *xmlAttr,
prev: *xmlAttr,
doc: *xmlDoc,
ns: *xmlNs,
atype: xmlAttributeType,
psvi: *c_void
}
#[allow(dead_code)]
pub struct xmlAttribute {
_private: *c_void,
_type: xmlElementType, // AttributeDecl
name: *c_char,
children: *xmlNode, // NULL
last: *xmlNode, // NULL
parent: *xmlDtd, // NULL
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,
nexth: *xmlAttribute,
atype: xmlAttributeType,
def: xmlAttributeDefault,
defaultValue: *xmlChar,
tree: *xmlEnumeration,
prefix: *xmlChar,
elem: *xmlChar
}
#[allow(dead_code)]
#[repr(C)]
pub enum xmlAttributeDefault {
None = 1,
Required = 2,
Implied = 3,
Fixed = 4
}
#[repr(C)]
pub enum xmlAttributeType {
CDATA = 1,
ID = 2,
IDRef = 3,
IDRefs = 4,
Entity = 5,
Entities = 6,
NMToken = 7,
NMTokens = 8,
Enumeration = 9,
Notation = 10
}
pub struct xmlDoc {
_private: *c_void,
_type: xmlElementType, // DocumentNode
name: *c_char,
children: *xmlNode,
last: *xmlNode,
parent: *xmlNode,
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,<|fim▁hole|> oldNs: *xmlNs,
version: *c_char,
encoding: *c_char,
ids: *c_void,
refs: *c_void,
url: *c_char,
charset: c_int,
dict: *c_void,
psvi: *c_void,
parseFlags: c_int,
properties: c_int
}
pub struct xmlDtd {
_private: *c_void,
_type: xmlElementType, // DTDNode
name: *c_char,
children: *xmlNode,
last: *xmlNode,
parent: *xmlDoc,
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,
notations: *c_void,
elements: *c_void,
attributes: *c_void,
entities: *c_void,
externalId: *xmlChar,
systemId: *xmlChar,
pentities: *c_void
}
#[allow(dead_code)]
pub struct xmlElement {
_private: *c_void,
_type: xmlElementType, // ElementDecl
name: *c_char,
children: *xmlNode, // NULL
last: *xmlNode, // NULL
parent: *xmlDtd,
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,
etype: xmlElementTypeVal,
content: xmlElementContent,
attributes: *xmlAttribute,
prefix: *xmlChar,
contModel: *c_void
}
#[allow(dead_code)]
#[repr(C)]
pub enum xmlElementTypeVal {
Undefined,
Empty,
Any,
Mixed,
ElementType
}
#[allow(dead_code)]
pub struct xmlElementContent {
_type: xmlElementContentType,
ocur: xmlElementContentOccur,
name: *xmlChar,
first: *xmlElementContent,
second: *xmlElementContent,
parent: *xmlElementContent,
prefix: *xmlChar
}
#[allow(dead_code)]
#[repr(C)]
pub enum xmlElementContentType {
PCData,
ElementContent,
Seq,
Or
}
#[allow(dead_code)]
#[repr(C)]
pub enum xmlElementContentOccur {
Once,
Opt,
Mult,
Plus
}
#[allow(dead_code)]
pub struct xmlEntity {
_private: *c_void,
_type: xmlElementType, // EntityDecl
name: *c_char,
children: *xmlNode,
last: *xmlNode,
parent: *xmlDtd,
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,
orig: *xmlChar,
content: *xmlChar,
    length: c_int,
    etype: xmlEntityType,
externalID: *xmlChar,
systemID: *xmlChar,
nexte: *c_void,
uri: *xmlChar,
owner: c_int,
checked: c_int
}
#[allow(dead_code)]
#[repr(C)]
pub enum xmlEntityType {
InternalGeneralEntity = 1,
ExternalGeneralParsedEntity = 2,
ExternalGeneralUnparsedEntity = 3,
InternalParameterEntity = 4,
ExternalParameterEntity = 5,
InternalPredefinedEntity = 6
}
#[allow(dead_code)]
pub struct xmlEnumeration {
next: *xmlEnumeration,
name: *xmlChar
}
pub struct xmlNode {
_private: *c_void,
_type: xmlElementType,
name: *xmlChar,
children: *xmlNode,
last: *xmlNode,
parent: *xmlNode,
next: *xmlNode,
prev: *xmlNode,
doc: *xmlDoc,
ns: *xmlNs,
content: *xmlChar,
properties: *xmlAttr,
nsDef: *xmlNs,
psvi: *c_void,
line: c_ushort,
extra: c_ushort
}
pub struct xmlNs {
next: *xmlNs,
_type: xmlElementType,
href: *xmlChar,
prefix: *xmlChar,
private: *c_void,
context: *xmlDoc
}
enum xmlSaveCtxt {}
#[link(name = "xml2")]
extern "C" {
pub fn xmlCheckVersion(version: c_int);
// Parser API
pub fn xmlReadIO(ioread: extern "C" fn (context: *mut c_void, buffer: *mut c_char, len: c_int) -> c_int,
ioclose: extern "C" fn (context: *mut c_void) -> c_int,
context: *mut c_void,
url: *c_char,
encoding: *c_char,
options: c_int) -> *xmlDoc;
// Tree API
pub fn xmlDocGetRootElement(doc: *xmlDoc) -> *xmlNode;
pub fn xmlFreeDoc(doc: *xmlDoc);
// XML Save API
pub fn xmlSaveClose(ctx: *xmlSaveCtxt) -> c_int;
pub fn xmlSaveDoc(ctx: *xmlSaveCtxt, doc: *xmlDoc) -> c_long;
pub fn xmlSaveToIO(iowrite: extern "C" fn (context: *mut c_void, buffer: *c_char, len: c_int) -> c_int,
ioclose: extern "C" fn (context: *mut c_void) -> c_int,
context: *mut c_void,
encoding: *c_char,
options: c_int) -> *xmlSaveCtxt;
}
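// Illustrative sketch (assumed usage, written in this file's old Rust dialect):
// libxml2 hands the `context` pointer back verbatim to both callbacks, so a
// Rust reader can be threaded through as a raw pointer.
//
// extern "C" fn read_cb(ctx: *mut c_void, buf: *mut c_char, len: c_int) -> c_int {
//     /* copy up to `len` bytes into `buf`; return the byte count, 0 on EOF */ 0
// }
// extern "C" fn close_cb(_ctx: *mut c_void) -> c_int { 0 }
//
// let doc = xmlReadIO(read_cb, close_cb, ctx, ptr::null(), ptr::null(), 0);
// // ... walk the tree from xmlDocGetRootElement(doc) ...
// xmlFreeDoc(doc);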
pub static xmlVersion : c_int = 20901;<|fim▁end|>
|
compression: c_int,
standalone: c_int,
intSubset: *xmlDtd,
extSubset: *xmlDtd,
|
<|file_name|>0022_pinpoint_gateway_fee_amount_null.py<|end_file_name|><|fim▁begin|>from django.db import migrations
from corehq.apps.smsbillables.management.commands.bootstrap_gateway_fees import (
bootstrap_pinpoint_gateway,
)
def add_pinpoint_gateway_fee_for_migration(apps, schema_editor):
bootstrap_pinpoint_gateway(apps)
class Migration(migrations.Migration):
dependencies = [
('smsbillables', '0021_infobip_gateway_fee_amount_null'),
]
operations = [
migrations.RunPython(add_pinpoint_gateway_fee_for_migration),<|fim▁hole|><|fim▁end|>
|
]
|
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::iter::*;
use core::iter::order::*;
use core::iter::MinMaxResult::*;
use core::num::SignedInt;
use core::usize;
use core::cmp;
use test::Bencher;
#[test]
fn test_lt() {
let empty: [isize; 0] = [];
let xs = [1,2,3];
let ys = [1,2,0];
assert!(!lt(xs.iter(), ys.iter()));
assert!(!le(xs.iter(), ys.iter()));
assert!( gt(xs.iter(), ys.iter()));
assert!( ge(xs.iter(), ys.iter()));
assert!( lt(ys.iter(), xs.iter()));
assert!( le(ys.iter(), xs.iter()));
assert!(!gt(ys.iter(), xs.iter()));
assert!(!ge(ys.iter(), xs.iter()));
assert!( lt(empty.iter(), xs.iter()));
assert!( le(empty.iter(), xs.iter()));
assert!(!gt(empty.iter(), xs.iter()));
assert!(!ge(empty.iter(), xs.iter()));
// Sequence with NaN
let u = [1.0f64, 2.0];
let v = [0.0f64/0.0, 3.0];
assert!(!lt(u.iter(), v.iter()));
assert!(!le(u.iter(), v.iter()));
assert!(!gt(u.iter(), v.iter()));
assert!(!ge(u.iter(), v.iter()));
let a = [0.0f64/0.0];
let b = [1.0f64];
let c = [2.0f64];
assert!(lt(a.iter(), b.iter()) == (a[0] < b[0]));
assert!(le(a.iter(), b.iter()) == (a[0] <= b[0]));
assert!(gt(a.iter(), b.iter()) == (a[0] > b[0]));
assert!(ge(a.iter(), b.iter()) == (a[0] >= b[0]));
assert!(lt(c.iter(), b.iter()) == (c[0] < b[0]));
assert!(le(c.iter(), b.iter()) == (c[0] <= b[0]));
assert!(gt(c.iter(), b.iter()) == (c[0] > b[0]));
assert!(ge(c.iter(), b.iter()) == (c[0] >= b[0]));
}
#[test]
fn test_multi_iter() {
let xs = [1,2,3,4];
let ys = [4,3,2,1];
assert!(eq(xs.iter(), ys.iter().rev()));
assert!(lt(xs.iter(), xs.iter().skip(2)));
}
#[test]
fn test_counter_from_iter() {
let it = (0..).step_by(5).take(10);
let xs: Vec<isize> = FromIterator::from_iter(it);
assert_eq!(xs, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
}
#[test]
fn test_iterator_chain() {
let xs = [0, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60];
let expected = [0, 1, 2, 3, 4, 5, 30, 40, 50, 60];
let it = xs.iter().chain(ys.iter());
let mut i = 0;
for &x in it {
assert_eq!(x, expected[i]);
i += 1;
}
assert_eq!(i, expected.len());
let ys = (30..).step_by(10).take(4);
let it = xs.iter().cloned().chain(ys);
let mut i = 0;
for x in it {
assert_eq!(x, expected[i]);
i += 1;
}
assert_eq!(i, expected.len());
}
#[test]
fn test_filter_map() {
let it = (0..).step_by(1).take(10)
.filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None });
assert_eq!(it.collect::<Vec<usize>>(), [0*0, 2*2, 4*4, 6*6, 8*8]);
}
#[test]
fn test_iterator_enumerate() {
let xs = [0, 1, 2, 3, 4, 5];
let it = xs.iter().enumerate();
for (i, &x) in it {
assert_eq!(i, x);
}
}
#[test]
fn test_iterator_peekable() {
let xs = vec![0, 1, 2, 3, 4, 5];
let mut it = xs.iter().cloned().peekable();
assert_eq!(it.len(), 6);
assert_eq!(it.peek().unwrap(), &0);
assert_eq!(it.len(), 6);
assert_eq!(it.next().unwrap(), 0);
assert_eq!(it.len(), 5);
assert_eq!(it.next().unwrap(), 1);
assert_eq!(it.len(), 4);
assert_eq!(it.next().unwrap(), 2);
assert_eq!(it.len(), 3);
assert_eq!(it.peek().unwrap(), &3);
assert_eq!(it.len(), 3);
assert_eq!(it.peek().unwrap(), &3);
assert_eq!(it.len(), 3);
assert_eq!(it.next().unwrap(), 3);
assert_eq!(it.len(), 2);
assert_eq!(it.next().unwrap(), 4);
assert_eq!(it.len(), 1);
assert_eq!(it.peek().unwrap(), &5);
assert_eq!(it.len(), 1);
assert_eq!(it.next().unwrap(), 5);
assert_eq!(it.len(), 0);
assert!(it.peek().is_none());
assert_eq!(it.len(), 0);
assert!(it.next().is_none());
assert_eq!(it.len(), 0);
}
#[test]
fn test_iterator_take_while() {
let xs = [0, 1, 2, 3, 5, 13, 15, 16, 17, 19];
let ys = [0, 1, 2, 3, 5, 13];
let it = xs.iter().take_while(|&x| *x < 15);
let mut i = 0;
for x in it {
assert_eq!(*x, ys[i]);
i += 1;
}
assert_eq!(i, ys.len());
}
#[test]
fn test_iterator_skip_while() {
let xs = [0, 1, 2, 3, 5, 13, 15, 16, 17, 19];
let ys = [15, 16, 17, 19];
let it = xs.iter().skip_while(|&x| *x < 15);
let mut i = 0;
for x in it {
assert_eq!(*x, ys[i]);
i += 1;
}
assert_eq!(i, ys.len());
}
#[test]
fn test_iterator_skip() {
let xs = [0, 1, 2, 3, 5, 13, 15, 16, 17, 19, 20, 30];
let ys = [13, 15, 16, 17, 19, 20, 30];
let mut it = xs.iter().skip(5);
let mut i = 0;
while let Some(&x) = it.next() {
assert_eq!(x, ys[i]);
i += 1;
assert_eq!(it.len(), xs.len()-5-i);
}
assert_eq!(i, ys.len());
assert_eq!(it.len(), 0);
}
#[test]
fn test_iterator_take() {
let xs = [0, 1, 2, 3, 5, 13, 15, 16, 17, 19];
let ys = [0, 1, 2, 3, 5];
let mut it = xs.iter().take(5);
let mut i = 0;
assert_eq!(it.len(), 5);
while let Some(&x) = it.next() {
assert_eq!(x, ys[i]);
i += 1;
assert_eq!(it.len(), 5-i);
}
assert_eq!(i, ys.len());
assert_eq!(it.len(), 0);
}
#[test]
fn test_iterator_take_short() {
let xs = [0, 1, 2, 3];
let ys = [0, 1, 2, 3];
let mut it = xs.iter().take(5);
let mut i = 0;
assert_eq!(it.len(), 4);
while let Some(&x) = it.next() {
assert_eq!(x, ys[i]);
i += 1;
assert_eq!(it.len(), 4-i);
}
assert_eq!(i, ys.len());
assert_eq!(it.len(), 0);
}
#[test]
fn test_iterator_scan() {
// test the type inference
fn add(old: &mut isize, new: &usize) -> Option<f64> {
*old += *new as isize;
Some(*old as f64)
}
let xs = [0, 1, 2, 3, 4];
let ys = [0f64, 1.0, 3.0, 6.0, 10.0];
let it = xs.iter().scan(0, add);
let mut i = 0;
for x in it {
assert_eq!(x, ys[i]);
i += 1;
}
assert_eq!(i, ys.len());
}
#[test]
fn test_iterator_flat_map() {
let xs = [0, 3, 6];
let ys = [0, 1, 2, 3, 4, 5, 6, 7, 8];
let it = xs.iter().flat_map(|&x| (x..).step_by(1).take(3));
let mut i = 0;
for x in it {
assert_eq!(x, ys[i]);
i += 1;
}
assert_eq!(i, ys.len());
}
#[test]
fn test_inspect() {
let xs = [1, 2, 3, 4];
let mut n = 0;
let ys = xs.iter()
.cloned()
.inspect(|_| n += 1)
.collect::<Vec<usize>>();
assert_eq!(n, xs.len());
assert_eq!(&xs[..], &ys[..]);
}<|fim▁hole|> fn count(st: &mut usize) -> Option<usize> {
if *st < 10 {
let ret = Some(*st);
*st += 1;
ret
} else {
None
}
}
let it = Unfold::new(0, count);
let mut i = 0;
for counted in it {
assert_eq!(counted, i);
i += 1;
}
assert_eq!(i, 10);
}
#[test]
fn test_cycle() {
let cycle_len = 3;
let it = (0..).step_by(1).take(cycle_len).cycle();
assert_eq!(it.size_hint(), (usize::MAX, None));
for (i, x) in it.take(100).enumerate() {
assert_eq!(i % cycle_len, x);
}
let mut it = (0..).step_by(1).take(0).cycle();
assert_eq!(it.size_hint(), (0, Some(0)));
assert_eq!(it.next(), None);
}
#[test]
fn test_iterator_nth() {
let v: &[_] = &[0, 1, 2, 3, 4];
for i in 0..v.len() {
assert_eq!(v.iter().nth(i).unwrap(), &v[i]);
}
assert_eq!(v.iter().nth(v.len()), None);
}
#[test]
fn test_iterator_last() {
let v: &[_] = &[0, 1, 2, 3, 4];
assert_eq!(v.iter().last().unwrap(), &4);
assert_eq!(v[..1].iter().last().unwrap(), &0);
}
#[test]
fn test_iterator_len() {
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v[..4].iter().count(), 4);
assert_eq!(v[..10].iter().count(), 10);
assert_eq!(v[..0].iter().count(), 0);
}
#[test]
fn test_iterator_sum() {
let v: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v[..4].iter().cloned().sum(), 6);
assert_eq!(v.iter().cloned().sum(), 55);
assert_eq!(v[..0].iter().cloned().sum(), 0);
}
#[test]
fn test_iterator_product() {
let v: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v[..4].iter().cloned().product(), 0);
assert_eq!(v[1..5].iter().cloned().product(), 24);
assert_eq!(v[..0].iter().cloned().product(), 1);
}
#[test]
fn test_iterator_max() {
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v[..4].iter().cloned().max(), Some(3));
assert_eq!(v.iter().cloned().max(), Some(10));
assert_eq!(v[..0].iter().cloned().max(), None);
}
#[test]
fn test_iterator_min() {
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v[..4].iter().cloned().min(), Some(0));
assert_eq!(v.iter().cloned().min(), Some(0));
assert_eq!(v[..0].iter().cloned().min(), None);
}
#[test]
fn test_iterator_size_hint() {
let c = (0..).step_by(1);
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let v2 = &[10, 11, 12];
let vi = v.iter();
assert_eq!(c.size_hint(), (usize::MAX, None));
assert_eq!(vi.clone().size_hint(), (10, Some(10)));
assert_eq!(c.clone().take(5).size_hint(), (5, Some(5)));
assert_eq!(c.clone().skip(5).size_hint().1, None);
assert_eq!(c.clone().take_while(|_| false).size_hint(), (0, None));
assert_eq!(c.clone().skip_while(|_| false).size_hint(), (0, None));
assert_eq!(c.clone().enumerate().size_hint(), (usize::MAX, None));
assert_eq!(c.clone().chain(vi.clone().cloned()).size_hint(), (usize::MAX, None));
assert_eq!(c.clone().zip(vi.clone()).size_hint(), (10, Some(10)));
assert_eq!(c.clone().scan(0, |_,_| Some(0)).size_hint(), (0, None));
assert_eq!(c.clone().filter(|_| false).size_hint(), (0, None));
assert_eq!(c.clone().map(|_| 0).size_hint(), (usize::MAX, None));
assert_eq!(c.filter_map(|_| Some(0)).size_hint(), (0, None));
assert_eq!(vi.clone().take(5).size_hint(), (5, Some(5)));
assert_eq!(vi.clone().take(12).size_hint(), (10, Some(10)));
assert_eq!(vi.clone().skip(3).size_hint(), (7, Some(7)));
assert_eq!(vi.clone().skip(12).size_hint(), (0, Some(0)));
assert_eq!(vi.clone().take_while(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().skip_while(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().enumerate().size_hint(), (10, Some(10)));
assert_eq!(vi.clone().chain(v2.iter()).size_hint(), (13, Some(13)));
assert_eq!(vi.clone().zip(v2.iter()).size_hint(), (3, Some(3)));
assert_eq!(vi.clone().scan(0, |_,_| Some(0)).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().filter(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().map(|&i| i+1).size_hint(), (10, Some(10)));
assert_eq!(vi.filter_map(|_| Some(0)).size_hint(), (0, Some(10)));
}
#[test]
fn test_collect() {
let a = vec![1, 2, 3, 4, 5];
let b: Vec<isize> = a.iter().cloned().collect();
assert!(a == b);
}
#[test]
fn test_all() {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let v: Box<[isize]> = Box::new([1, 2, 3, 4, 5]);
assert!(v.iter().all(|&x| x < 10));
assert!(!v.iter().all(|&x| x % 2 == 0));
assert!(!v.iter().all(|&x| x > 100));
assert!(v[..0].iter().all(|_| panic!()));
}
#[test]
fn test_any() {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let v: Box<[isize]> = Box::new([1, 2, 3, 4, 5]);
assert!(v.iter().any(|&x| x < 10));
assert!(v.iter().any(|&x| x % 2 == 0));
assert!(!v.iter().any(|&x| x > 100));
assert!(!v[..0].iter().any(|_| panic!()));
}
#[test]
fn test_find() {
let v: &[isize] = &[1, 3, 9, 27, 103, 14, 11];
assert_eq!(*v.iter().find(|&&x| x & 1 == 0).unwrap(), 14);
assert_eq!(*v.iter().find(|&&x| x % 3 == 0).unwrap(), 3);
assert!(v.iter().find(|&&x| x % 12 == 0).is_none());
}
#[test]
fn test_position() {
let v = &[1, 3, 9, 27, 103, 14, 11];
assert_eq!(v.iter().position(|x| *x & 1 == 0).unwrap(), 5);
assert_eq!(v.iter().position(|x| *x % 3 == 0).unwrap(), 1);
assert!(v.iter().position(|x| *x % 12 == 0).is_none());
}
#[test]
fn test_count() {
let xs = &[1, 2, 2, 1, 5, 9, 0, 2];
assert_eq!(xs.iter().filter(|x| **x == 2).count(), 3);
assert_eq!(xs.iter().filter(|x| **x == 5).count(), 1);
assert_eq!(xs.iter().filter(|x| **x == 95).count(), 0);
}
#[test]
fn test_max_by() {
let xs: &[isize] = &[-3, 0, 1, 5, -10];
assert_eq!(*xs.iter().max_by(|x| x.abs()).unwrap(), -10);
}
#[test]
fn test_min_by() {
let xs: &[isize] = &[-3, 0, 1, 5, -10];
assert_eq!(*xs.iter().min_by(|x| x.abs()).unwrap(), 0);
}
#[test]
fn test_by_ref() {
let mut xs = 0..10;
// sum the first five values
let partial_sum = xs.by_ref().take(5).fold(0, |a, b| a + b);
assert_eq!(partial_sum, 10);
assert_eq!(xs.next(), Some(5));
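    // `by_ref` only borrows `xs`, so `take(5)` consumed just the first five
    // items and the underlying range resumes at 5 here.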
}
#[test]
fn test_rev() {
let xs = [2, 4, 6, 8, 10, 12, 14, 16];
let mut it = xs.iter();
it.next();
it.next();
assert!(it.rev().cloned().collect::<Vec<isize>>() ==
vec![16, 14, 12, 10, 8, 6]);
}
#[test]
fn test_cloned() {
let xs = [2u8, 4, 6, 8];
let mut it = xs.iter().cloned();
assert_eq!(it.len(), 4);
assert_eq!(it.next(), Some(2));
assert_eq!(it.len(), 3);
assert_eq!(it.next(), Some(4));
assert_eq!(it.len(), 2);
assert_eq!(it.next_back(), Some(8));
assert_eq!(it.len(), 1);
assert_eq!(it.next_back(), Some(6));
assert_eq!(it.len(), 0);
assert_eq!(it.next_back(), None);
}
#[test]
fn test_double_ended_map() {
let xs = [1, 2, 3, 4, 5, 6];
let mut it = xs.iter().map(|&x| x * -1);
assert_eq!(it.next(), Some(-1));
assert_eq!(it.next(), Some(-2));
assert_eq!(it.next_back(), Some(-6));
assert_eq!(it.next_back(), Some(-5));
assert_eq!(it.next(), Some(-3));
assert_eq!(it.next_back(), Some(-4));
assert_eq!(it.next(), None);
}
#[test]
fn test_double_ended_enumerate() {
let xs = [1, 2, 3, 4, 5, 6];
let mut it = xs.iter().cloned().enumerate();
assert_eq!(it.next(), Some((0, 1)));
assert_eq!(it.next(), Some((1, 2)));
assert_eq!(it.next_back(), Some((5, 6)));
assert_eq!(it.next_back(), Some((4, 5)));
assert_eq!(it.next_back(), Some((3, 4)));
assert_eq!(it.next_back(), Some((2, 3)));
assert_eq!(it.next(), None);
}
#[test]
fn test_double_ended_zip() {
let xs = [1, 2, 3, 4, 5, 6];
let ys = [1, 2, 3, 7];
let a = xs.iter().cloned();
let b = ys.iter().cloned();
let mut it = a.zip(b);
assert_eq!(it.next(), Some((1, 1)));
assert_eq!(it.next(), Some((2, 2)));
assert_eq!(it.next_back(), Some((4, 7)));
assert_eq!(it.next_back(), Some((3, 3)));
assert_eq!(it.next(), None);
}
#[test]
fn test_double_ended_filter() {
let xs = [1, 2, 3, 4, 5, 6];
let mut it = xs.iter().filter(|&x| *x & 1 == 0);
assert_eq!(it.next_back().unwrap(), &6);
assert_eq!(it.next_back().unwrap(), &4);
assert_eq!(it.next().unwrap(), &2);
assert_eq!(it.next_back(), None);
}
#[test]
fn test_double_ended_filter_map() {
let xs = [1, 2, 3, 4, 5, 6];
let mut it = xs.iter().filter_map(|&x| if x & 1 == 0 { Some(x * 2) } else { None });
assert_eq!(it.next_back().unwrap(), 12);
assert_eq!(it.next_back().unwrap(), 8);
assert_eq!(it.next().unwrap(), 4);
assert_eq!(it.next_back(), None);
}
#[test]
fn test_double_ended_chain() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
let mut it = xs.iter().chain(ys.iter()).rev();
assert_eq!(it.next().unwrap(), &11);
assert_eq!(it.next().unwrap(), &9);
assert_eq!(it.next_back().unwrap(), &1);
assert_eq!(it.next_back().unwrap(), &2);
assert_eq!(it.next_back().unwrap(), &3);
assert_eq!(it.next_back().unwrap(), &4);
assert_eq!(it.next_back().unwrap(), &5);
assert_eq!(it.next_back().unwrap(), &7);
assert_eq!(it.next_back(), None);
}
#[test]
fn test_rposition() {
fn f(xy: &(isize, char)) -> bool { let (_x, y) = *xy; y == 'b' }
fn g(xy: &(isize, char)) -> bool { let (_x, y) = *xy; y == 'd' }
let v = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert_eq!(v.iter().rposition(f), Some(3));
assert!(v.iter().rposition(g).is_none());
}
#[test]
#[should_panic]
fn test_rposition_panic() {
let v: [(Box<_>, Box<_>); 4] =
[(box 0, box 0), (box 0, box 0),
(box 0, box 0), (box 0, box 0)];
let mut i = 0;
v.iter().rposition(|_elt| {
if i == 2 {
panic!()
}
i += 1;
false
});
}
#[cfg(test)]
fn check_randacc_iter<A, T>(a: T, len: usize) where
A: PartialEq,
T: Clone + RandomAccessIterator + Iterator<Item=A>,
{
let mut b = a.clone();
assert_eq!(len, b.indexable());
let mut n = 0;
for (i, elt) in a.enumerate() {
assert!(Some(elt) == b.idx(i));
n += 1;
}
assert_eq!(n, len);
assert!(None == b.idx(n));
// call recursively to check after picking off an element
if len > 0 {
b.next();
check_randacc_iter(b, len-1);
}
}
#[test]
fn test_double_ended_flat_map() {
let u = [0,1];
let v = [5,6,7,8];
let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter());
assert_eq!(it.next_back().unwrap(), &8);
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.next_back().unwrap(), &7);
assert_eq!(it.next_back().unwrap(), &6);
assert_eq!(it.next_back().unwrap(), &8);
assert_eq!(it.next().unwrap(), &6);
assert_eq!(it.next_back().unwrap(), &7);
assert_eq!(it.next_back(), None);
assert_eq!(it.next(), None);
assert_eq!(it.next_back(), None);
}
#[test]
fn test_random_access_chain() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
let mut it = xs.iter().chain(ys.iter());
assert_eq!(it.idx(0).unwrap(), &1);
assert_eq!(it.idx(5).unwrap(), &7);
assert_eq!(it.idx(7).unwrap(), &11);
assert!(it.idx(8).is_none());
it.next();
it.next();
it.next_back();
assert_eq!(it.idx(0).unwrap(), &3);
assert_eq!(it.idx(4).unwrap(), &9);
assert!(it.idx(6).is_none());
check_randacc_iter(it, xs.len() + ys.len() - 3);
}
#[test]
fn test_random_access_enumerate() {
let xs = [1, 2, 3, 4, 5];
check_randacc_iter(xs.iter().enumerate(), xs.len());
}
#[test]
fn test_random_access_rev() {
let xs = [1, 2, 3, 4, 5];
check_randacc_iter(xs.iter().rev(), xs.len());
let mut it = xs.iter().rev();
it.next();
it.next_back();
it.next();
check_randacc_iter(it, xs.len() - 3);
}
#[test]
fn test_random_access_zip() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
check_randacc_iter(xs.iter().zip(ys.iter()), cmp::min(xs.len(), ys.len()));
}
#[test]
fn test_random_access_take() {
let xs = [1, 2, 3, 4, 5];
let empty: &[isize] = &[];
check_randacc_iter(xs.iter().take(3), 3);
check_randacc_iter(xs.iter().take(20), xs.len());
check_randacc_iter(xs.iter().take(0), 0);
check_randacc_iter(empty.iter().take(2), 0);
}
#[test]
fn test_random_access_skip() {
let xs = [1, 2, 3, 4, 5];
let empty: &[isize] = &[];
check_randacc_iter(xs.iter().skip(2), xs.len() - 2);
check_randacc_iter(empty.iter().skip(2), 0);
}
#[test]
fn test_random_access_inspect() {
let xs = [1, 2, 3, 4, 5];
// test .map and .inspect that don't implement Clone
let mut it = xs.iter().inspect(|_| {});
assert_eq!(xs.len(), it.indexable());
for (i, elt) in xs.iter().enumerate() {
assert_eq!(Some(elt), it.idx(i));
}
}
#[test]
fn test_random_access_map() {
let xs = [1, 2, 3, 4, 5];
let mut it = xs.iter().cloned();
assert_eq!(xs.len(), it.indexable());
for (i, elt) in xs.iter().enumerate() {
assert_eq!(Some(*elt), it.idx(i));
}
}
#[test]
fn test_random_access_cycle() {
let xs = [1, 2, 3, 4, 5];
let empty: &[isize] = &[];
check_randacc_iter(xs.iter().cycle().take(27), 27);
check_randacc_iter(empty.iter().cycle(), 0);
}
#[test]
fn test_double_ended_range() {
assert_eq!((11..14).rev().collect::<Vec<_>>(), [13, 12, 11]);
for _ in (10..0).rev() {
panic!("unreachable");
}
}
#[test]
fn test_range() {
assert_eq!((0..5).collect::<Vec<_>>(), [0, 1, 2, 3, 4]);
assert_eq!((-10..-1).collect::<Vec<_>>(), [-10, -9, -8, -7, -6, -5, -4, -3, -2]);
assert_eq!((0..5).rev().collect::<Vec<_>>(), [4, 3, 2, 1, 0]);
assert_eq!((200..-5).count(), 0);
assert_eq!((200..-5).rev().count(), 0);
assert_eq!((200..200).count(), 0);
assert_eq!((200..200).rev().count(), 0);
assert_eq!((0..100).size_hint(), (100, Some(100)));
// this test is only meaningful when sizeof usize < sizeof u64
assert_eq!((usize::MAX - 1..usize::MAX).size_hint(), (1, Some(1)));
assert_eq!((-10..-1).size_hint(), (9, Some(9)));
assert_eq!((-1..-10).size_hint(), (0, Some(0)));
}
#[test]
fn test_range_inclusive() {
assert!(range_inclusive(0, 5).collect::<Vec<isize>>() ==
vec![0, 1, 2, 3, 4, 5]);
assert!(range_inclusive(0, 5).rev().collect::<Vec<isize>>() ==
vec![5, 4, 3, 2, 1, 0]);
assert_eq!(range_inclusive(200, -5).count(), 0);
assert_eq!(range_inclusive(200, -5).rev().count(), 0);
assert_eq!(range_inclusive(200, 200).collect::<Vec<isize>>(), [200]);
assert_eq!(range_inclusive(200, 200).rev().collect::<Vec<isize>>(), [200]);
}
#[test]
fn test_range_step() {
assert_eq!((0..20).step_by(5).collect::<Vec<isize>>(), [0, 5, 10, 15]);
assert_eq!((20..0).step_by(-5).collect::<Vec<isize>>(), [20, 15, 10, 5]);
assert_eq!((20..0).step_by(-6).collect::<Vec<isize>>(), [20, 14, 8, 2]);
assert_eq!((200..255).step_by(50).collect::<Vec<u8>>(), [200, 250]);
assert_eq!((200..-5).step_by(1).collect::<Vec<isize>>(), []);
assert_eq!((200..200).step_by(1).collect::<Vec<isize>>(), []);
}
#[test]
fn test_range_step_inclusive() {
assert_eq!(range_step_inclusive(0, 20, 5).collect::<Vec<isize>>(), [0, 5, 10, 15, 20]);
assert_eq!(range_step_inclusive(20, 0, -5).collect::<Vec<isize>>(), [20, 15, 10, 5, 0]);
assert_eq!(range_step_inclusive(20, 0, -6).collect::<Vec<isize>>(), [20, 14, 8, 2]);
assert_eq!(range_step_inclusive(200, 255, 50).collect::<Vec<u8>>(), [200, 250]);
assert_eq!(range_step_inclusive(200, -5, 1).collect::<Vec<isize>>(), []);
assert_eq!(range_step_inclusive(200, 200, 1).collect::<Vec<isize>>(), [200]);
}
#[test]
fn test_reverse() {
let mut ys = [1, 2, 3, 4, 5];
ys.iter_mut().reverse_in_place();
assert!(ys == [5, 4, 3, 2, 1]);
}
#[test]
fn test_peekable_is_empty() {
let a = [1];
let mut it = a.iter().peekable();
assert!( !it.is_empty() );
it.next();
assert!( it.is_empty() );
}
#[test]
fn test_min_max() {
let v: [isize; 0] = [];
assert_eq!(v.iter().min_max(), NoElements);
let v = [1];
assert!(v.iter().min_max() == OneElement(&1));
let v = [1, 2, 3, 4, 5];
assert!(v.iter().min_max() == MinMax(&1, &5));
let v = [1, 2, 3, 4, 5, 6];
assert!(v.iter().min_max() == MinMax(&1, &6));
let v = [1, 1, 1, 1];
assert!(v.iter().min_max() == MinMax(&1, &1));
}
#[test]
fn test_min_max_result() {
let r: MinMaxResult<isize> = NoElements;
assert_eq!(r.into_option(), None);
let r = OneElement(1);
assert_eq!(r.into_option(), Some((1,1)));
let r = MinMax(1,2);
assert_eq!(r.into_option(), Some((1,2)));
}
#[test]
fn test_iterate() {
let mut it = iterate(1, |x| x * 2);
assert_eq!(it.next(), Some(1));
assert_eq!(it.next(), Some(2));
assert_eq!(it.next(), Some(4));
assert_eq!(it.next(), Some(8));
}
#[test]
fn test_repeat() {
let mut it = repeat(42);
assert_eq!(it.next(), Some(42));
assert_eq!(it.next(), Some(42));
assert_eq!(it.next(), Some(42));
}
#[test]
fn test_fuse() {
let mut it = 0..3;
assert_eq!(it.len(), 3);
assert_eq!(it.next(), Some(0));
assert_eq!(it.len(), 2);
assert_eq!(it.next(), Some(1));
assert_eq!(it.len(), 1);
assert_eq!(it.next(), Some(2));
assert_eq!(it.len(), 0);
assert_eq!(it.next(), None);
assert_eq!(it.len(), 0);
assert_eq!(it.next(), None);
assert_eq!(it.len(), 0);
assert_eq!(it.next(), None);
assert_eq!(it.len(), 0);
}
#[bench]
fn bench_rposition(b: &mut Bencher) {
let it: Vec<usize> = (0..300).collect();
b.iter(|| {
it.iter().rposition(|&x| x <= 150);
});
}
#[bench]
fn bench_skip_while(b: &mut Bencher) {
b.iter(|| {
let it = 0..100;
let mut sum = 0;
it.skip_while(|&x| { sum += x; sum < 4000 }).all(|_| true);
});
}
#[bench]
fn bench_multiple_take(b: &mut Bencher) {
let mut it = (0..42).cycle();
b.iter(|| {
let n = it.next().unwrap();
for _ in 0..n {
it.clone().take(it.next().unwrap()).all(|_| true);
}
});
}<|fim▁end|>
|
#[test]
fn test_unfoldr() {
|
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate libc;
use std::io;
macro_rules! try_or {
($val:expr, $or:expr) => {
match $val {
Ok(v) => v,
Err(e) => {
return $or(e);
}
}<|fim▁hole|>
pub trait Signed {
fn is_negative(&self) -> bool;
}
impl Signed for i32 {
fn is_negative(&self) -> bool {
*self < 0
}
}
impl Signed for usize {
fn is_negative(&self) -> bool {
(*self as isize) < 0
}
}
#[cfg(any(target_os = "linux"))]
pub fn from_unix_result<T: Signed>(rv: T) -> io::Result<T> {
if rv.is_negative() {
let errno = unsafe { *libc::__errno_location() };
Err(io::Error::from_raw_os_error(errno))
} else {
Ok(rv)
}
}
#[cfg(any(target_os = "freebsd"))]
pub fn from_unix_result<T: Signed>(rv: T) -> io::Result<T> {
if rv.is_negative() {
let errno = unsafe { *libc::__error() };
Err(io::Error::from_raw_os_error(errno))
} else {
Ok(rv)
}
}
#[cfg(any(target_os = "openbsd"))]
pub fn from_unix_result<T: Signed>(rv: T) -> io::Result<T> {
if rv.is_negative() {
Err(io::Error::last_os_error())
} else {
Ok(rv)
}
}
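// Illustrative call site (a sketch; `path` is an assumed CString): a negative
// return value becomes an io::Error built from the thread's errno.
//
// let fd = from_unix_result(unsafe { libc::open(path.as_ptr(), libc::O_RDONLY) })?;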
pub fn io_err(msg: &str) -> io::Error {
io::Error::new(io::ErrorKind::Other, msg)
}<|fim▁end|>
|
};
}
|
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import { App } from "./nwGui";
import Process from "./process";
App.removeAllListeners( "open" );
export default App;
export const argv = App.argv;
export const filteredArgv = App.filteredArgv;<|fim▁hole|>export function quit() {
try {
// manually emit the process's exit event
Process.emit( "exit" );
} catch ( e ) {}
App.quit();
}<|fim▁end|>
|
export const manifest = App.manifest;
|
<|file_name|>OscTools.hpp<|end_file_name|><|fim▁begin|>#ifndef OSCTOOLS_HPP_INCLUDED
#define OSCTOOLS_HPP_INCLUDED

#include "ofxOsc.h" // for ofxOscMessage, used throughout this header
class OscOptionalUnpacker
{
ofxOscMessage & msg;
int n;
public:
OscOptionalUnpacker(ofxOscMessage & m):msg(m),n(0){}
OscOptionalUnpacker & operator >> (int & i)
{
if(n < msg.getNumArgs())
{
i = msg.getArgAsInt32( n++ );
}
return *this;
}
OscOptionalUnpacker & operator >> (float & i)
{
if(n < msg.getNumArgs())
{
i = msg.getArgAsFloat( n++ );
}
return *this;
}
OscOptionalUnpacker & operator >> (double & i)
{
if(n < msg.getNumArgs())
{
i = msg.getArgAsFloat( n++ );
}
return *this;
}
OscOptionalUnpacker & operator >> (std::string & i)<|fim▁hole|> {
i = msg.getArgAsString( n++ );
}
return *this;
}
bool Eos()
{
return n >= msg.getNumArgs();
}
};
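// Illustrative usage (a sketch; variable names are assumptions): arguments
// missing from the message leave their targets untouched, so callers can
// pre-load defaults before unpacking.
//
//   int x = 0;
//   float y = 0.0f;
//   std::string label = "default";
//   OscOptionalUnpacker(msg) >> x >> y >> label;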
class OscPacker
{
ofxOscMessage & msg;
public:
OscPacker(ofxOscMessage & m):msg(m){}
OscPacker & operator << (int i)
{
msg.addIntArg(i);
return *this;
}
OscPacker & operator << (unsigned int i)
{
msg.addIntArg(i);
return *this;
}
OscPacker & operator << (float i)
{
msg.addFloatArg(i);
return *this;
}
OscPacker & operator << (const std::string & i)
{
msg.addStringArg(i);
return *this;
}
};
#endif // OSCTOOLS_HPP_INCLUDED<|fim▁end|>
|
{
if(n < msg.getNumArgs())
|
<|file_name|>facebook.js<|end_file_name|><|fim▁begin|>var
util = require('util'),
querystring = require('querystring'),
request = require('request');
function FacebookProvider(client_id, client_secret, redirect_uri) {
this.client_id = client_id;
this.client_secret = client_secret;
this.redirect_uri = redirect_uri;
}
FacebookProvider.prototype.getAuthenticateURL = function (options) {
return util.format('https://www.facebook.com/dialog/oauth?client_id=%s&response_type=%s&state=%s&redirect_uri=%s',
(options && options.client_id) || this.client_id,
'code',
String(Math.random() * 100000000),
encodeURIComponent((options && options.redirect_uri) || this.redirect_uri));
};
FacebookProvider.prototype.getAuthentication = function (options, callback) {
var
that = this,
qs = {
client_id: this.client_id,
client_secret: this.client_secret,
grant_type: 'authorization_code',
redirect_uri: options.redirect_uri || this.redirect_uri,
code: options.code
};
request({
method: 'GET',
uri: 'https://graph.facebook.com/oauth/access_token',
qs: qs,
timeout: 5000 // 5 seconds
}, function (err, res, body) {
if (err) {
return callback(err);
}
if (res.statusCode !== 200) {
return callback(new Error('Bad response code: ' + res.statusCode));
}
console.log('>>> ' + body);
var r = querystring.parse(body);
// get id & profile:
that.requestAPI('GET', 'me', r.access_token, null, function (err, p) {
if (err) {
return callback(err);
}
callback(null, {
access_token: r.access_token,
refresh_token: '',
expires_in: parseInt(r.expires, 10),
auth_id: p.id,
name: p.name,
url: p.link,
image_url: ''
});
});
});
};
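// Illustrative wiring (a sketch; route and variable names are assumptions):
//
// var fb = new FacebookProvider(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI);
// // 1. redirect the user to fb.getAuthenticateURL({});
// // 2. in the OAuth callback route, exchange the code for a profile:
// fb.getAuthentication({ code: req.query.code }, function (err, auth) {
//     if (err) { return next(err); }
//     console.log(auth.auth_id, auth.name, auth.expires_in);
// });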
<|fim▁hole|>FacebookProvider.prototype.requestAPI = function (method, apiName, access_token, options, callback) {
options = options || {};
options.access_token = access_token;
var opts = {
method: method,
uri: 'https://graph.facebook.com/' + apiName,
timeout: 5000
};
if (method === 'GET') {
opts.qs = options;
}
if (method === 'POST') {
opts.form = options;
}
request(opts, function (err, res, body) {
if (err) {
return callback(err);
}
if (res.statusCode !== 200) {
return callback(new Error('Bad response code: ' + res.statusCode));
}
var r;
try {
r = JSON.parse(body);
} catch (e) {
return callback(e);
}
if (r.error) {
return callback(new Error(r.error.message));
}
callback(null, r);
});
};
module.exports = FacebookProvider;<|fim▁end|>
| |
<|file_name|>constant.py<|end_file_name|><|fim▁begin|>class constant():
folder_name = 'results'
MAX_HELP_POSITION = 27
CURRENT_VERSION = '0.9.1'
output = None
file_logger = None
# jitsi options
jitsi_masterpass = None
# mozilla options
manually = None
path = None
bruteforce = None
specific_path = None
mozilla_software = ''
# ie options
ie_historic = None
<|fim▁hole|>
# total password found
nbPasswordFound = 0
passwordFound = []<|fim▁end|>
| |
<|file_name|>shorten_path.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
import re
<|fim▁hole|> print >>sys.stderr, "Usage: $0 <path> <length>"
sys.exit(1)
path = re.sub(os.getenv('HOME'), '~', path)
while len(path) > length:
dirs = path.split("/");
# Find the longest directory in the path.
max_index = -1
max_length = 3
for i in range(len(dirs) - 1):
if len(dirs[i]) > max_length:
max_index = i
max_length = len(dirs[i])
# Shorten it by one character.
if max_index >= 0:
dirs[max_index] = dirs[max_index][:max_length-3] + ".."
path = "/".join(dirs)
# Didn't find anything to shorten. This is as good as it gets.
else:
break
print(path)<|fim▁end|>
|
try:
path = sys.argv[1]
length = int(sys.argv[2])
except:
|
<|file_name|>Combination.py<|end_file_name|><|fim▁begin|>import bpy
from functions import *
class Combination():
'''A class containing all properties and methods
relative to combination settings for
Curve To Frame addon'''
def update_curves( self, context ):
		'''method that must be overridden: update curves when settings have been changed'''
type(self).update_curves( self, context )
######################################
## combination settings ##
######################################
# method used to combine amplitude and peaks curve
combination_mode = bpy.props.EnumProperty(
name = 'Mode',
description = 'the way to combine amplitude and peaks curve',
default = 'ignore_peaks',
items = [
# (identifier, name,
# description, number)
('multiply', 'Peaks Curve Multiplied by amplitude',
			'peaks curve is multiplied by the \
			amplitude percentage of maximum', 0),
('clamp_key', 'Peaks Keyframe Clamped to amplitude',
'peaks keyframe is clamped by amplitude', 1),
('clamp_curve', 'Peaks Curve Clamped to amplitude',
'all peaks value is clamped by amplitude', 2),
('ignore_amplitude', 'Only use peaks curve',
'Only use peaks curve', 3),
('ignore_peaks', 'Only use amplitude curve',
'Only use amplitude curve', 4)
],
update = update_curves
)
# combination of net amplitude and peaks curves
combination = bpy.props.FloatProperty(
name = "combination",
description = "Only to visualize the combination of \
peaks and amplitude curve curve. Can't \
be edit manually: use rate and amplitude settings.",
default = 0,
min = 0,
max = 1)
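	# Minimal sketch of how the modes above combine the two curve values at a
	# given frame (helper name is an assumption, not part of the addon):
	#
	# def combine(mode, peak, amplitude):
	#     peak = max(min(peak, 1), 0)
	#     if mode == 'multiply':
	#         return peak * amplitude
	#     if mode in ('clamp_key', 'clamp_curve'):
	#         return max(min(amplitude, peak, 1), 0)
	#     if mode == 'ignore_amplitude':
	#         return peak
	#     return amplitude  # 'ignore_peaks'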
def update_combination_curve(
self,
clip,
context,
amplitude_net_curve,
peaks_curve):
'''update clip combination curve'''
# get combination mode curve
combination_enum = clip.curve_to_frame.bl_rna.\
properties['combination_mode'].enum_items
combination_mode = combination_enum.find( clip.curve_to_frame.combination_mode )
combination_mode_curve = get_fcurve_by_data_path(clip,
'curve_to_frame.combination_mode')
# get and initialize combination curve
combination_curve = get_fcurve_by_data_path(clip,
'curve_to_frame.combination')
if combination_curve is not None:
hide = combination_curve.hide
clip.animation_data.action.fcurves.remove(combination_curve)
else:
hide = True
clip.animation_data.action.fcurves.new(
'curve_to_frame.combination')
combination_curve = get_fcurve_by_data_path(clip,
'curve_to_frame.combination')
# get rate curve
rate_curve = get_fcurve_by_data_path(clip, 'curve_to_frame.rate')
# loop only on peak curve keyframe
for keyframe in peaks_curve.keyframe_points:
# get peaks keyframe value and frame
frame = keyframe.co[0]
value = max( min(1, keyframe.co[1]), 0 )
# get combination_mode at this frame
if combination_mode_curve is not None:
combination_mode = combination_mode_curve.evaluate(frame)
# generate keyframe
			if combination_mode != 3 : # all modes except «ignore_amplitude» scale peaks by amplitude
value = value * amplitude_net_curve.evaluate(frame)
if combination_mode != 4 :
combination_curve.keyframe_points.insert(frame, value)
combination_curve.keyframe_points[-1].interpolation = 'LINEAR'
# loop for all frame
end = max( peaks_curve.keyframe_points[-1].co[0],
context.scene.frame_end )
frame = start = context.scene.frame_start
while frame <= end:
# get combination_mode at this frame
if combination_mode_curve is not None:
combination_mode = combination_mode_curve.evaluate(frame)
if combination_mode == 0 : # combination mode is «multiply»
value = max( min( 1, peaks_curve.evaluate(frame) ), 0 )\
* amplitude_net_curve.evaluate(frame)
combination_curve.keyframe_points.insert(frame, value)
elif combination_mode == 2: # combination mode is «clamp_curve»
combination_curve.keyframe_points.insert(
frame,
max(
min (
amplitude_net_curve.evaluate(frame),
peaks_curve.evaluate(frame),
1
),
0
)
)
elif combination_mode == 4:
# combination mode is «ignore peaks»
combination_curve.keyframe_points.insert(
frame,
amplitude_net_curve.evaluate(frame)<|fim▁hole|> combination_curve.keyframe_points[-1].interpolation = 'LINEAR'
# next frame
frame += 1
#erase keyframe on flat section
avoid_useless_keyframe( combination_curve )
# prevent curve edition
combination_curve.lock = True
combination_curve.hide = hide
return combination_curve<|fim▁end|>
|
)
|
<|file_name|>DownloadLink.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import com.sdl.selenium.extjs3.ExtJsComponent;
import com.sdl.selenium.web.SearchType;
import com.sdl.selenium.web.WebLocator;
public class DownloadLink extends ExtJsComponent implements Download {
public DownloadLink() {
setClassName("DownloadLink");
setTag("a");
}
public DownloadLink(WebLocator container) {
this();
setContainer(container);
}
public DownloadLink(WebLocator container, String text) {
this(container);
setText(text, SearchType.EQUALS);
}
/**
* Wait for the element to be activated when there is deactivation mask on top of it
*
* @param seconds time
*/
@Override
public boolean waitToActivate(int seconds) {
return getXPath().contains("ext-ux-livegrid") || super.waitToActivate(seconds);
}
/**
     * If WebDriverConfig.isSilentDownload() is true, a silent download is performed; if it is false, the download is done with AutoIT.
     * Downloading with AutoIT works only on Firefox. Silent download works on Firefox and Chrome.
* Use only this: button.download("C:\\TestSet.tmx");
* return true if the downloaded file is the same one that is meant to be downloaded, otherwise returns false.
*
* @param fileName e.g. "TestSet.tmx"
*/
@Override
public boolean download(String fileName) {
openBrowse();
return executor.download(fileName, 10000L);
}
private void openBrowse() {
executor.browse(this);
}
}<|fim▁end|>
|
package com.sdl.selenium.extjs3.button;
import com.sdl.selenium.bootstrap.button.Download;
|
<|file_name|>ExecutionGraphException.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
/** Base class for exceptions occurring in the {@link ExecutionGraph}. */
public class ExecutionGraphException extends Exception {
private static final long serialVersionUID = -8253451032797220657L;
public ExecutionGraphException(String message) {
super(message);
}
public ExecutionGraphException(String message, Throwable cause) {
super(message, cause);
}
public ExecutionGraphException(Throwable cause) {
super(cause);
}
}<|fim▁end|>
|
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
extern crate clap;
extern crate env_logger;
#[macro_use] extern crate error_chain;
extern crate futures;
extern crate intecture_api;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
extern crate tokio_core;
extern crate tokio_proto;
extern crate tokio_service;
extern crate toml;
mod errors;
use error_chain::ChainedError;
use errors::*;
use futures::{future, Future};
use intecture_api::host::local::Local;
use intecture_api::host::remote::JsonLineProto;
use intecture_api::{FromMessage, InMessage, Request};
use std::fs::File;
use std::io::{self, Read};
use std::net::SocketAddr;
use std::result;
use std::sync::Arc;
use tokio_core::reactor::Remote;
use tokio_proto::streaming::Message;
use tokio_proto::TcpServer;
use tokio_service::{NewService, Service};
pub struct Api {
host: Local,
}
pub struct NewApi {
remote: Remote,
}
<|fim▁hole|> type Error = Error;
type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;
fn call(&self, req: Self::Request) -> Self::Future {
let request = match Request::from_msg(req)
.chain_err(|| "Malformed Request")
{
Ok(r) => r,
Err(e) => return Box::new(future::ok(error_to_msg(e))),
};
Box::new(request.exec(&self.host)
.chain_err(|| "Failed to execute Request")
.then(|mut result| match result {
Ok(mut msg) => {
let mut reply = msg.get_mut();
reply = format!("{\"Ok\":\"{}\"}", reply);
future::ok(msg)
},
Err(e) => future::ok(error_to_msg(e))
}))
}
}
impl NewService for NewApi {
type Request = InMessage;
type Response = InMessage;
type Error = Error;
type Instance = Api;
fn new_service(&self) -> io::Result<Self::Instance> {
// XXX Danger zone! If we're running multiple threads, this `unwrap()`
// will explode. The API requires a `Handle`, but we can only send a
// `Remote` to this Service. Currently we force the `Handle`, which is
// only safe for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let handle = self.remote.handle().unwrap();
Ok(Api {
host: Local::new(&handle).wait().unwrap(),
})
}
}
#[derive(Deserialize)]
struct Config {
address: SocketAddr,
}
quick_main!(|| -> Result<()> {
env_logger::init().chain_err(|| "Could not start logging")?;
let matches = clap::App::new("Intecture Agent")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.arg(clap::Arg::with_name("config")
.short("c")
.long("config")
.value_name("FILE")
.help("Path to the agent configuration file")
.takes_value(true))
.arg(clap::Arg::with_name("addr")
.short("a")
.long("address")
.value_name("ADDR")
.help("Set the socket address this server will listen on (e.g. 0.0.0.0:7101)")
.takes_value(true))
.group(clap::ArgGroup::with_name("config_or_else")
.args(&["config", "addr"])
.required(true))
.get_matches();
let config = if let Some(c) = matches.value_of("config") {
let mut fh = File::open(c).chain_err(|| "Could not open config file")?;
let mut buf = Vec::new();
fh.read_to_end(&mut buf).chain_err(|| "Could not read config file")?;
toml::from_slice(&buf).chain_err(|| "Config file contained invalid TOML")?
} else {
let address = matches.value_of("addr").unwrap().parse().chain_err(|| "Invalid server address")?;
Config { address }
};
// XXX We can only run a single thread here, or big boom!!
// The API requires a `Handle`, but we can only send a `Remote`.
// Currently we force the issue (`unwrap()`), which is only safe
// for the current thread.
// See https://github.com/alexcrichton/tokio-process/issues/23
let server = TcpServer::new(JsonLineProto, config.address);
server.with_handle(move |handle| {
Arc::new(NewApi {
remote: handle.remote().clone(),
})
});
Ok(())
});
fn error_to_msg(e: Error) -> InMessage {
let response: result::Result<(), String> = Err(format!("{}", e.display_chain()));
// If we can't serialize this, we can't serialize anything, so
// panicking is appropriate.
let value = serde_json::to_value(response)
.expect("Cannot serialize ResponseResult::Err. This is bad...");
Message::WithoutBody(value)
}<|fim▁end|>
|
impl Service for Api {
type Request = InMessage;
type Response = InMessage;
|
<|file_name|>stock_landed_costs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
from openerp.tools import float_compare
from openerp.tools.translate import _
import product
class stock_landed_cost(osv.osv):
_name = 'stock.landed.cost'
_description = 'Stock Landed Cost'
_inherit = 'mail.thread'
_track = {
'state': {
'stock_landed_costs.mt_stock_landed_cost_open': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done',
},
}
def _total_amount(self, cr, uid, ids, name, args, context=None):
result = {}
for cost in self.browse(cr, uid, ids, context=context):
total = 0.0
for line in cost.cost_lines:
total += line.price_unit
result[cost.id] = total
return result
def _get_cost_line(self, cr, uid, ids, context=None):
cost_to_recompute = []
for line in self.pool.get('stock.landed.cost.lines').browse(cr, uid, ids, context=context):
cost_to_recompute.append(line.cost_id.id)
return cost_to_recompute
def get_valuation_lines(self, cr, uid, ids, picking_ids=None, context=None):
picking_obj = self.pool.get('stock.picking')
lines = []
if not picking_ids:
return lines
for picking in picking_obj.browse(cr, uid, picking_ids):
for move in picking.move_lines:
#it doesn't make sense to make a landed cost for a product that isn't set as being valuated in real time at real cost
#if move.product_id.valuation != 'real_time' or move.product_id.cost_method != 'real':
# continue
total_cost = 0.0
total_qty = move.product_qty
weight = move.product_id and move.product_id.weight * move.product_qty
volume = move.product_id and move.product_id.volume * move.product_qty
for quant in move.quant_ids:
total_cost += quant.cost
vals = dict(product_id=move.product_id.id, move_id=move.id, quantity=move.product_uom_qty, former_cost=total_cost * total_qty, weight=weight, volume=volume)
lines.append(vals)
if not lines:
raise osv.except_osv(_('Error!'), _('The selected picking does not contain any move that would be impacted by landed costs. Landed costs are only possible for products configured in real time valuation with real price costing method. Please make sure it is the case, or you selected the correct picking'))
return lines
_columns = {
'name': fields.char('Name', track_visibility='always', readonly=True, copy=False),
'date': fields.date('Date', required=True, states={'done': [('readonly', True)]}, track_visibility='onchange', copy=False),
'picking_ids': fields.many2many('stock.picking', string='Pickings', states={'done': [('readonly', True)]}, copy=False),
'cost_lines': fields.one2many('stock.landed.cost.lines', 'cost_id', 'Cost Lines', states={'done': [('readonly', True)]}, copy=True),
'valuation_adjustment_lines': fields.one2many('stock.valuation.adjustment.lines', 'cost_id', 'Valuation Adjustments', states={'done': [('readonly', True)]}),
'description': fields.text('Item Description', states={'done': [('readonly', True)]}),
'amount_total': fields.function(_total_amount, type='float', string='Total', digits_compute=dp.get_precision('Account'),
store={
'stock.landed.cost': (lambda self, cr, uid, ids, c={}: ids, ['cost_lines'], 20),
'stock.landed.cost.lines': (_get_cost_line, ['price_unit', 'quantity', 'cost_id'], 20),
}, track_visibility='always'
),
'state': fields.selection([('draft', 'Draft'), ('done', 'Posted'), ('cancel', 'Cancelled')], 'State', readonly=True, track_visibility='onchange', copy=False),
'account_move_id': fields.many2one('account.move', 'Journal Entry', readonly=True, copy=False),
'account_journal_id': fields.many2one('account.journal', 'Account Journal', required=True, states={'done': [('readonly', True)]}),
}
_defaults = {
'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'stock.landed.cost'),
'state': 'draft',
'date': fields.date.context_today,
}
def _create_accounting_entries(self, cr, uid, line, move_id, qty_out, context=None):
product_obj = self.pool.get('product.template')
cost_product = line.cost_line_id and line.cost_line_id.product_id
if not cost_product:
return False
accounts = product_obj.get_product_accounts(cr, uid, line.product_id.product_tmpl_id.id, context=context)
debit_account_id = accounts['property_stock_valuation_account_id']
already_out_account_id = accounts['stock_account_output']
credit_account_id = line.cost_line_id.account_id.id or cost_product.property_account_expense.id or cost_product.categ_id.property_account_expense_categ.id
if not credit_account_id:
raise osv.except_osv(_('Error!'), _('Please configure Stock Expense Account for product: %s.') % (cost_product.name))
return self._create_account_move_line(cr, uid, line, move_id, credit_account_id, debit_account_id, qty_out, already_out_account_id, context=context)
def _create_account_move_line(self, cr, uid, line, move_id, credit_account_id, debit_account_id, qty_out, already_out_account_id, context=None):
"""
        Generate the account.move.line values to track the landed cost.
        Afterwards, create the corresponding output entries for the goods that have already left the stock.
"""
aml_obj = self.pool.get('account.move.line')
base_line = {
'name': line.name,<|fim▁hole|> 'product_id': line.product_id.id,
'quantity': line.quantity,
}
debit_line = dict(base_line, account_id=debit_account_id)
credit_line = dict(base_line, account_id=credit_account_id)
diff = line.additional_landed_cost
if diff > 0:
debit_line['debit'] = diff
credit_line['credit'] = diff
else:
# negative cost, reverse the entry
debit_line['credit'] = -diff
credit_line['debit'] = -diff
aml_obj.create(cr, uid, debit_line, context=context)
aml_obj.create(cr, uid, credit_line, context=context)
#Create account move lines for quants already out of stock
if qty_out > 0:
debit_line = dict(debit_line,
name=(line.name + ": " + str(qty_out) + _(' already out')),
quantity=qty_out)
credit_line = dict(credit_line,
name=(line.name + ": " + str(qty_out) + _(' already out')),
quantity=qty_out)
diff = diff * qty_out / line.quantity
if diff > 0:
debit_line['debit'] = diff
credit_line['credit'] = diff
else:
# negative cost, reverse the entry
debit_line['credit'] = -diff
credit_line['debit'] = -diff
aml_obj.create(cr, uid, debit_line, context=context)
aml_obj.create(cr, uid, credit_line, context=context)
return True
def _create_account_move(self, cr, uid, cost, context=None):
vals = {
'journal_id': cost.account_journal_id.id,
'period_id': self.pool.get('account.period').find(cr, uid, cost.date, context=context)[0],
'date': cost.date,
'ref': cost.name
}
return self.pool.get('account.move').create(cr, uid, vals, context=context)
def _check_sum(self, cr, uid, landed_cost, context=None):
"""
        Check that the valuation lines of each cost line sum to the cost line's amount,
        and that the overall total amount matches the landed cost total.
"""
costcor = {}
tot = 0
for valuation_line in landed_cost.valuation_adjustment_lines:
if costcor.get(valuation_line.cost_line_id):
costcor[valuation_line.cost_line_id] += valuation_line.additional_landed_cost
else:
costcor[valuation_line.cost_line_id] = valuation_line.additional_landed_cost
tot += valuation_line.additional_landed_cost
prec = self.pool['decimal.precision'].precision_get(cr, uid, 'Account')
# float_compare returns 0 for equal amounts
res = not bool(float_compare(tot, landed_cost.amount_total, precision_digits=prec))
for costl in costcor.keys():
if float_compare(costcor[costl], costl.price_unit, precision_digits=prec):
res = False
return res
def button_validate(self, cr, uid, ids, context=None):
quant_obj = self.pool.get('stock.quant')
for cost in self.browse(cr, uid, ids, context=context):
if cost.state != 'draft':
raise Warning(_('Only draft landed costs can be validated'))
if not cost.valuation_adjustment_lines or not self._check_sum(cr, uid, cost, context=context):
raise osv.except_osv(_('Error!'), _('You cannot validate a landed cost which has no valid valuation lines.'))
move_id = self._create_account_move(cr, uid, cost, context=context)
quant_dict = {}
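            # Spread each line's cost difference over the move's quants by adjusting
            # their unit cost; quants touched by several lines accumulate the deltas.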
for line in cost.valuation_adjustment_lines:
if not line.move_id:
continue
per_unit = line.final_cost / line.quantity
diff = per_unit - line.former_cost_per_unit
quants = [quant for quant in line.move_id.quant_ids]
for quant in quants:
if quant.id not in quant_dict:
quant_dict[quant.id] = quant.cost + diff
else:
quant_dict[quant.id] += diff
for key, value in quant_dict.items():
quant_obj.write(cr, uid, key, {'cost': value}, context=context)
qty_out = 0
for quant in line.move_id.quant_ids:
if quant.location_id.usage != 'internal':
qty_out += quant.qty
self._create_accounting_entries(cr, uid, line, move_id, qty_out, context=context)
self.write(cr, uid, cost.id, {'state': 'done', 'account_move_id': move_id}, context=context)
return True
def button_cancel(self, cr, uid, ids, context=None):
cost = self.browse(cr, uid, ids, context=context)
if cost.state == 'done':
raise Warning(_('Validated landed costs cannot be cancelled, '
'but you could create negative landed costs to reverse them'))
return cost.write({'state': 'cancel'})
def unlink(self, cr, uid, ids, context=None):
# cancel or raise first
self.button_cancel(cr, uid, ids, context)
return super(stock_landed_cost, self).unlink(cr, uid, ids, context=context)
def compute_landed_cost(self, cr, uid, ids, context=None):
line_obj = self.pool.get('stock.valuation.adjustment.lines')
unlink_ids = line_obj.search(cr, uid, [('cost_id', 'in', ids)], context=context)
line_obj.unlink(cr, uid, unlink_ids, context=context)
towrite_dict = {}
        for cost in self.browse(cr, uid, ids, context=context):
if not cost.picking_ids:
continue
picking_ids = [p.id for p in cost.picking_ids]
total_qty = 0.0
total_cost = 0.0
total_weight = 0.0
total_volume = 0.0
total_line = 0.0
vals = self.get_valuation_lines(cr, uid, [cost.id], picking_ids=picking_ids, context=context)
for v in vals:
for line in cost.cost_lines:
v.update({'cost_id': cost.id, 'cost_line_id': line.id})
self.pool.get('stock.valuation.adjustment.lines').create(cr, uid, v, context=context)
total_qty += v.get('quantity', 0.0)
total_cost += v.get('former_cost', 0.0)
total_weight += v.get('weight', 0.0)
total_volume += v.get('volume', 0.0)
total_line += 1
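            # Distribute each cost line over the valuation lines according to its
            # split method: by quantity, weight, volume, equal parts, or current cost.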
for line in cost.cost_lines:
for valuation in cost.valuation_adjustment_lines:
value = 0.0
if valuation.cost_line_id and valuation.cost_line_id.id == line.id:
if line.split_method == 'by_quantity' and total_qty:
per_unit = (line.price_unit / total_qty)
value = valuation.quantity * per_unit
elif line.split_method == 'by_weight' and total_weight:
per_unit = (line.price_unit / total_weight)
value = valuation.weight * per_unit
elif line.split_method == 'by_volume' and total_volume:
per_unit = (line.price_unit / total_volume)
value = valuation.volume * per_unit
elif line.split_method == 'equal':
value = (line.price_unit / total_line)
elif line.split_method == 'by_current_cost_price' and total_cost:
per_unit = (line.price_unit / total_cost)
value = valuation.former_cost * per_unit
else:
value = (line.price_unit / total_line)
if valuation.id not in towrite_dict:
towrite_dict[valuation.id] = value
else:
towrite_dict[valuation.id] += value
if towrite_dict:
for key, value in towrite_dict.items():
line_obj.write(cr, uid, key, {'additional_landed_cost': value}, context=context)
return True
class stock_landed_cost_lines(osv.osv):
_name = 'stock.landed.cost.lines'
_description = 'Stock Landed Cost Lines'
def onchange_product_id(self, cr, uid, ids, product_id=False, context=None):
result = {}
if not product_id:
return {'value': {'quantity': 0.0, 'price_unit': 0.0}}
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
result['name'] = product.name
result['split_method'] = product.split_method
result['price_unit'] = product.standard_price
result['account_id'] = product.property_account_expense and product.property_account_expense.id or product.categ_id.property_account_expense_categ.id
return {'value': result}
_columns = {
'name': fields.char('Description'),
'cost_id': fields.many2one('stock.landed.cost', 'Landed Cost', required=True, ondelete='cascade'),
'product_id': fields.many2one('product.product', 'Product', required=True),
'price_unit': fields.float('Cost', required=True, digits_compute=dp.get_precision('Product Price')),
'split_method': fields.selection(product.SPLIT_METHOD, string='Split Method', required=True),
'account_id': fields.many2one('account.account', 'Account', domain=[('type', '<>', 'view'), ('type', '<>', 'closed')]),
}
class stock_valuation_adjustment_lines(osv.osv):
_name = 'stock.valuation.adjustment.lines'
_description = 'Stock Valuation Adjustment Lines'
def _amount_final(self, cr, uid, ids, name, args, context=None):
result = {}
for line in self.browse(cr, uid, ids, context=context):
result[line.id] = {
'former_cost_per_unit': 0.0,
'final_cost': 0.0,
}
result[line.id]['former_cost_per_unit'] = (line.former_cost / line.quantity if line.quantity else 1.0)
result[line.id]['final_cost'] = (line.former_cost + line.additional_landed_cost)
return result
def _get_name(self, cr, uid, ids, name, arg, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = line.product_id.code or line.product_id.name or ''
if line.cost_line_id:
res[line.id] += ' - ' + line.cost_line_id.name
return res
_columns = {
'name': fields.function(_get_name, type='char', string='Description', store=True),
'cost_id': fields.many2one('stock.landed.cost', 'Landed Cost', required=True, ondelete='cascade'),
'cost_line_id': fields.many2one('stock.landed.cost.lines', 'Cost Line', readonly=True),
'move_id': fields.many2one('stock.move', 'Stock Move', readonly=True),
'product_id': fields.many2one('product.product', 'Product', required=True),
'quantity': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'weight': fields.float('Weight', digits_compute=dp.get_precision('Product Unit of Measure')),
'volume': fields.float('Volume', digits_compute=dp.get_precision('Product Unit of Measure')),
'former_cost': fields.float('Former Cost', digits_compute=dp.get_precision('Product Price')),
'former_cost_per_unit': fields.function(_amount_final, multi='cost', string='Former Cost(Per Unit)', type='float', digits_compute=dp.get_precision('Account'), store=True),
'additional_landed_cost': fields.float('Additional Landed Cost', digits_compute=dp.get_precision('Product Price')),
'final_cost': fields.function(_amount_final, multi='cost', string='Final Cost', type='float', digits_compute=dp.get_precision('Account'), store=True),
}
_defaults = {
'quantity': 1.0,
'weight': 1.0,
'volume': 1.0,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
'move_id': move_id,
|
<|file_name|>default_keyvalue_differ.js<|end_file_name|><|fim▁begin|>'use strict';var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc);
switch (arguments.length) {
case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target);
case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0);
case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc);
}
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var collection_1 = require('angular2/src/facade/collection');
var lang_1 = require('angular2/src/facade/lang');
var exceptions_1 = require('angular2/src/facade/exceptions');
var DefaultKeyValueDifferFactory = (function () {
function DefaultKeyValueDifferFactory() {
}
DefaultKeyValueDifferFactory.prototype.supports = function (obj) { return obj instanceof Map || lang_1.isJsObject(obj); };
DefaultKeyValueDifferFactory.prototype.create = function (cdRef) { return new DefaultKeyValueDiffer(); };
DefaultKeyValueDifferFactory = __decorate([
lang_1.CONST(),
__metadata('design:paramtypes', [])
], DefaultKeyValueDifferFactory);
return DefaultKeyValueDifferFactory;
})();
exports.DefaultKeyValueDifferFactory = DefaultKeyValueDifferFactory;
var DefaultKeyValueDiffer = (function () {
function DefaultKeyValueDiffer() {
this._records = new Map();
this._mapHead = null;
this._previousMapHead = null;
this._changesHead = null;
this._changesTail = null;
this._additionsHead = null;
this._additionsTail = null;
this._removalsHead = null;
this._removalsTail = null;
}
Object.defineProperty(DefaultKeyValueDiffer.prototype, "isDirty", {
get: function () {
return this._additionsHead !== null || this._changesHead !== null ||
this._removalsHead !== null;
},
enumerable: true,
configurable: true
});
DefaultKeyValueDiffer.prototype.forEachItem = function (fn) {
var record;
for (record = this._mapHead; record !== null; record = record._next) {
fn(record);
}
};
DefaultKeyValueDiffer.prototype.forEachPreviousItem = function (fn) {
var record;
for (record = this._previousMapHead; record !== null; record = record._nextPrevious) {
fn(record);
}
};
DefaultKeyValueDiffer.prototype.forEachChangedItem = function (fn) {
var record;
for (record = this._changesHead; record !== null; record = record._nextChanged) {
fn(record);
}
};
DefaultKeyValueDiffer.prototype.forEachAddedItem = function (fn) {
var record;
for (record = this._additionsHead; record !== null; record = record._nextAdded) {
fn(record);
}
};
DefaultKeyValueDiffer.prototype.forEachRemovedItem = function (fn) {
var record;
for (record = this._removalsHead; record !== null; record = record._nextRemoved) {
fn(record);
}
};
DefaultKeyValueDiffer.prototype.diff = function (map) {
if (lang_1.isBlank(map))
map = collection_1.MapWrapper.createFromPairs([]);
if (!(map instanceof Map || lang_1.isJsObject(map))) {
throw new exceptions_1.BaseException("Error trying to diff '" + map + "'");
}
if (this.check(map)) {
return this;
}
else {
return null;
}
};
DefaultKeyValueDiffer.prototype.onDestroy = function () { };
DefaultKeyValueDiffer.prototype.check = function (map) {
var _this = this;
this._reset();
var records = this._records;
var oldSeqRecord = this._mapHead;
var lastOldSeqRecord = null;
var lastNewSeqRecord = null;
var seqChanged = false;
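        // Single pass over the new map: records that still line up with the old
        // sequence are reused in place; after the first divergence, records are
        // re-linked, added, or queued as removals.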
this._forEach(map, function (value, key) {
var newSeqRecord;
if (oldSeqRecord !== null && key === oldSeqRecord.key) {
newSeqRecord = oldSeqRecord;
if (!lang_1.looseIdentical(value, oldSeqRecord.currentValue)) {
oldSeqRecord.previousValue = oldSeqRecord.currentValue;
oldSeqRecord.currentValue = value;
_this._addToChanges(oldSeqRecord);
}
}
else {
seqChanged = true;
if (oldSeqRecord !== null) {
oldSeqRecord._next = null;
_this._removeFromSeq(lastOldSeqRecord, oldSeqRecord);
_this._addToRemovals(oldSeqRecord);
}
if (records.has(key)) {
newSeqRecord = records.get(key);
}
else {
newSeqRecord = new KVChangeRecord(key);
records.set(key, newSeqRecord);
newSeqRecord.currentValue = value;
_this._addToAdditions(newSeqRecord);
}
}
if (seqChanged) {
if (_this._isInRemovals(newSeqRecord)) {
_this._removeFromRemovals(newSeqRecord);
}
if (lastNewSeqRecord == null) {
_this._mapHead = newSeqRecord;
}
else {
lastNewSeqRecord._next = newSeqRecord;
}
}
lastOldSeqRecord = oldSeqRecord;
lastNewSeqRecord = newSeqRecord;
oldSeqRecord = oldSeqRecord === null ? null : oldSeqRecord._next;
});
this._truncate(lastOldSeqRecord, oldSeqRecord);
return this.isDirty;
};
/** @internal */
DefaultKeyValueDiffer.prototype._reset = function () {
if (this.isDirty) {
var record;
// Record the state of the mapping
for (record = this._previousMapHead = this._mapHead; record !== null; record = record._next) {
record._nextPrevious = record._next;
}
for (record = this._changesHead; record !== null; record = record._nextChanged) {
record.previousValue = record.currentValue;
}
for (record = this._additionsHead; record != null; record = record._nextAdded) {
record.previousValue = record.currentValue;
}
// todo(vicb) once assert is supported
// assert(() {
// var r = _changesHead;
// while (r != null) {
// var nextRecord = r._nextChanged;
// r._nextChanged = null;
// r = nextRecord;
// }
//
// r = _additionsHead;
// while (r != null) {
// var nextRecord = r._nextAdded;
// r._nextAdded = null;
// r = nextRecord;
// }
//
// r = _removalsHead;
// while (r != null) {
// var nextRecord = r._nextRemoved;
// r._nextRemoved = null;
// r = nextRecord;
// }
//
// return true;
//});
this._changesHead = this._changesTail = null;
this._additionsHead = this._additionsTail = null;
this._removalsHead = this._removalsTail = null;
}
};
/** @internal */
DefaultKeyValueDiffer.prototype._truncate = function (lastRecord, record) {
while (record !== null) {
if (lastRecord === null) {
this._mapHead = null;
}
else {
lastRecord._next = null;
}
var nextRecord = record._next;
// todo(vicb) assert
// assert((() {
// record._next = null;
// return true;
//}));
this._addToRemovals(record);
lastRecord = record;
record = nextRecord;
}
for (var rec = this._removalsHead; rec !== null; rec = rec._nextRemoved) {
rec.previousValue = rec.currentValue;
rec.currentValue = null;
this._records.delete(rec.key);
}
};
/** @internal */
DefaultKeyValueDiffer.prototype._isInRemovals = function (record) {
return record === this._removalsHead || record._nextRemoved !== null ||
record._prevRemoved !== null;
};
/** @internal */
DefaultKeyValueDiffer.prototype._addToRemovals = function (record) {
// todo(vicb) assert
// assert(record._next == null);
// assert(record._nextAdded == null);
// assert(record._nextChanged == null);
// assert(record._nextRemoved == null);
// assert(record._prevRemoved == null);
if (this._removalsHead === null) {
this._removalsHead = this._removalsTail = record;<|fim▁hole|> }
else {
this._removalsTail._nextRemoved = record;
record._prevRemoved = this._removalsTail;
this._removalsTail = record;
}
};
/** @internal */
DefaultKeyValueDiffer.prototype._removeFromSeq = function (prev, record) {
var next = record._next;
if (prev === null) {
this._mapHead = next;
}
else {
prev._next = next;
}
// todo(vicb) assert
// assert((() {
// record._next = null;
// return true;
//})());
};
/** @internal */
DefaultKeyValueDiffer.prototype._removeFromRemovals = function (record) {
// todo(vicb) assert
// assert(record._next == null);
// assert(record._nextAdded == null);
// assert(record._nextChanged == null);
var prev = record._prevRemoved;
var next = record._nextRemoved;
if (prev === null) {
this._removalsHead = next;
}
else {
prev._nextRemoved = next;
}
if (next === null) {
this._removalsTail = prev;
}
else {
next._prevRemoved = prev;
}
record._prevRemoved = record._nextRemoved = null;
};
/** @internal */
DefaultKeyValueDiffer.prototype._addToAdditions = function (record) {
// todo(vicb): assert
// assert(record._next == null);
// assert(record._nextAdded == null);
// assert(record._nextChanged == null);
// assert(record._nextRemoved == null);
// assert(record._prevRemoved == null);
if (this._additionsHead === null) {
this._additionsHead = this._additionsTail = record;
}
else {
this._additionsTail._nextAdded = record;
this._additionsTail = record;
}
};
/** @internal */
DefaultKeyValueDiffer.prototype._addToChanges = function (record) {
// todo(vicb) assert
// assert(record._nextAdded == null);
// assert(record._nextChanged == null);
// assert(record._nextRemoved == null);
// assert(record._prevRemoved == null);
if (this._changesHead === null) {
this._changesHead = this._changesTail = record;
}
else {
this._changesTail._nextChanged = record;
this._changesTail = record;
}
};
DefaultKeyValueDiffer.prototype.toString = function () {
var items = [];
var previous = [];
var changes = [];
var additions = [];
var removals = [];
var record;
for (record = this._mapHead; record !== null; record = record._next) {
items.push(lang_1.stringify(record));
}
for (record = this._previousMapHead; record !== null; record = record._nextPrevious) {
previous.push(lang_1.stringify(record));
}
for (record = this._changesHead; record !== null; record = record._nextChanged) {
changes.push(lang_1.stringify(record));
}
for (record = this._additionsHead; record !== null; record = record._nextAdded) {
additions.push(lang_1.stringify(record));
}
for (record = this._removalsHead; record !== null; record = record._nextRemoved) {
removals.push(lang_1.stringify(record));
}
return "map: " + items.join(', ') + "\n" + "previous: " + previous.join(', ') + "\n" +
"additions: " + additions.join(', ') + "\n" + "changes: " + changes.join(', ') + "\n" +
"removals: " + removals.join(', ') + "\n";
};
/** @internal */
DefaultKeyValueDiffer.prototype._forEach = function (obj, fn) {
if (obj instanceof Map) {
obj.forEach(fn);
}
else {
collection_1.StringMapWrapper.forEach(obj, fn);
}
};
return DefaultKeyValueDiffer;
})();
exports.DefaultKeyValueDiffer = DefaultKeyValueDiffer;
var KVChangeRecord = (function () {
function KVChangeRecord(key) {
this.key = key;
this.previousValue = null;
this.currentValue = null;
/** @internal */
this._nextPrevious = null;
/** @internal */
this._next = null;
/** @internal */
this._nextAdded = null;
/** @internal */
this._nextRemoved = null;
/** @internal */
this._prevRemoved = null;
/** @internal */
this._nextChanged = null;
}
KVChangeRecord.prototype.toString = function () {
return lang_1.looseIdentical(this.previousValue, this.currentValue) ?
lang_1.stringify(this.key) :
(lang_1.stringify(this.key) + '[' + lang_1.stringify(this.previousValue) + '->' +
lang_1.stringify(this.currentValue) + ']');
};
return KVChangeRecord;
})();
exports.KVChangeRecord = KVChangeRecord;
//# sourceMappingURL=default_keyvalue_differ.js.map<|fim▁end|>
| |
<|file_name|>linkbubble_test.js<|end_file_name|><|fim▁begin|>// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.provide('goog.editor.plugins.LinkBubbleTest');
goog.setTestOnly('goog.editor.plugins.LinkBubbleTest');
goog.require('goog.dom');
goog.require('goog.dom.Range');
goog.require('goog.dom.TagName');
goog.require('goog.editor.Command');
goog.require('goog.editor.Link');
goog.require('goog.editor.plugins.LinkBubble');
goog.require('goog.events.BrowserEvent');
goog.require('goog.events.Event');
goog.require('goog.events.EventType');
goog.require('goog.events.KeyCodes');
goog.require('goog.string');
goog.require('goog.style');
goog.require('goog.testing.FunctionMock');
goog.require('goog.testing.PropertyReplacer');
goog.require('goog.testing.editor.FieldMock');
goog.require('goog.testing.editor.TestHelper');
goog.require('goog.testing.events');
goog.require('goog.testing.jsunit');
goog.require('goog.userAgent');
var fieldDiv;
var FIELDMOCK;
var linkBubble;
var link;
var linkChild;
var mockWindowOpen;
var stubs;
var testHelper;
function setUpPage() {
fieldDiv = goog.dom.$('field');
stubs = new goog.testing.PropertyReplacer();
testHelper = new goog.testing.editor.TestHelper(goog.dom.getElement('field'));
}
function setUp() {
testHelper.setUpEditableElement();<|fim▁hole|> FIELDMOCK = new goog.testing.editor.FieldMock();
linkBubble = new goog.editor.plugins.LinkBubble();
linkBubble.fieldObject = FIELDMOCK;
link = fieldDiv.firstChild;
linkChild = link.lastChild;
mockWindowOpen = new goog.testing.FunctionMock('open');
stubs.set(window, 'open', mockWindowOpen);
}
function tearDown() {
linkBubble.closeBubble();
testHelper.tearDownEditableElement();
stubs.reset();
}
function testLinkSelected() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
goog.dom.Range.createFromNodeContents(link).select();
linkBubble.handleSelectionChange();
assertBubble();
FIELDMOCK.$verify();
}
function testLinkClicked() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
FIELDMOCK.$verify();
}
function testImageLink() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
link.setAttribute('imageanchor', 1);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
FIELDMOCK.$verify();
}
function closeBox() {
var closeBox = goog.dom.getElementsByTagNameAndClass(goog.dom.TagName.DIV,
'tr_bubble_closebox');
assertEquals('Should find only one close box', 1, closeBox.length);
assertNotNull('Found close box', closeBox[0]);
goog.testing.events.fireClickSequence(closeBox[0]);
}
function testCloseBox() {
testLinkClicked();
closeBox();
assertNoBubble();
FIELDMOCK.$verify();
}
function testChangeClicked() {
FIELDMOCK.execCommand(goog.editor.Command.MODAL_LINK_EDITOR,
new goog.editor.Link(link, false));
FIELDMOCK.$registerArgumentListVerifier('execCommand', function(arr1, arr2) {
return arr1.length == arr2.length &&
arr1.length == 2 &&
arr1[0] == goog.editor.Command.MODAL_LINK_EDITOR &&
arr2[0] == goog.editor.Command.MODAL_LINK_EDITOR &&
arr1[1] instanceof goog.editor.Link &&
arr2[1] instanceof goog.editor.Link;
});
FIELDMOCK.$times(1);
FIELDMOCK.$returns(true);
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
goog.testing.events.fireClickSequence(
goog.dom.$(goog.editor.plugins.LinkBubble.CHANGE_LINK_ID_));
assertNoBubble();
FIELDMOCK.$verify();
}
function testChangePressed() {
FIELDMOCK.execCommand(goog.editor.Command.MODAL_LINK_EDITOR,
new goog.editor.Link(link, false));
FIELDMOCK.$registerArgumentListVerifier('execCommand', function(arr1, arr2) {
return arr1.length == arr2.length &&
arr1.length == 2 &&
arr1[0] == goog.editor.Command.MODAL_LINK_EDITOR &&
arr2[0] == goog.editor.Command.MODAL_LINK_EDITOR &&
arr1[1] instanceof goog.editor.Link &&
arr2[1] instanceof goog.editor.Link;
});
FIELDMOCK.$times(1);
FIELDMOCK.$returns(true);
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
var defaultPrevented = !goog.testing.events.fireKeySequence(
goog.dom.$(goog.editor.plugins.LinkBubble.CHANGE_LINK_ID_),
goog.events.KeyCodes.ENTER);
assertTrue(defaultPrevented);
assertNoBubble();
FIELDMOCK.$verify();
}
function testDeleteClicked() {
FIELDMOCK.dispatchBeforeChange();
FIELDMOCK.$times(1);
FIELDMOCK.dispatchChange();
FIELDMOCK.$times(1);
FIELDMOCK.focus();
FIELDMOCK.$times(1);
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
goog.testing.events.fireClickSequence(
goog.dom.$(goog.editor.plugins.LinkBubble.DELETE_LINK_ID_));
var element = goog.userAgent.GECKO ? document.body : fieldDiv;
assertNotEquals('Link removed', element.firstChild.nodeName,
goog.dom.TagName.A);
assertNoBubble();
var range = goog.dom.Range.createFromWindow();
assertEquals('Link selection on link text', linkChild, range.getEndNode());
assertEquals('Link selection on link text end',
goog.dom.getRawTextContent(linkChild).length, range.getEndOffset());
FIELDMOCK.$verify();
}
function testDeletePressed() {
FIELDMOCK.dispatchBeforeChange();
FIELDMOCK.$times(1);
FIELDMOCK.dispatchChange();
FIELDMOCK.$times(1);
FIELDMOCK.focus();
FIELDMOCK.$times(1);
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
var defaultPrevented = !goog.testing.events.fireKeySequence(
goog.dom.$(goog.editor.plugins.LinkBubble.DELETE_LINK_ID_),
goog.events.KeyCodes.ENTER);
assertTrue(defaultPrevented);
var element = goog.userAgent.GECKO ? document.body : fieldDiv;
assertNotEquals('Link removed', element.firstChild.nodeName,
goog.dom.TagName.A);
assertNoBubble();
var range = goog.dom.Range.createFromWindow();
assertEquals('Link selection on link text', linkChild, range.getEndNode());
assertEquals('Link selection on link text end',
goog.dom.getRawTextContent(linkChild).length, range.getEndOffset());
FIELDMOCK.$verify();
}
function testActionClicked() {
var SPAN = 'actionSpanId';
var LINK = 'actionLinkId';
var toShowCount = 0;
var actionCount = 0;
var linkAction = new goog.editor.plugins.LinkBubble.Action(
SPAN, LINK, 'message',
function() {
toShowCount++;
return toShowCount == 1; // Show it the first time.
},
function() {
actionCount++;
});
linkBubble = new goog.editor.plugins.LinkBubble(linkAction);
linkBubble.fieldObject = FIELDMOCK;
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
// The first time the bubble is shown, show our custom action.
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
assertEquals('Should check showing the action', 1, toShowCount);
assertEquals('Action should not have fired yet', 0, actionCount);
assertTrue('Action should be visible 1st time', goog.style.isElementShown(
goog.dom.$(SPAN)));
goog.testing.events.fireClickSequence(goog.dom.$(LINK));
assertEquals('Should not check showing again yet', 1, toShowCount);
assertEquals('Action should be fired', 1, actionCount);
closeBox();
assertNoBubble();
// The action won't be shown the second time around.
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
assertEquals('Should check showing again', 2, toShowCount);
assertEquals('Action should not fire again', 1, actionCount);
assertFalse('Action should not be shown 2nd time', goog.style.isElementShown(
goog.dom.$(SPAN)));
FIELDMOCK.$verify();
}
function testLinkTextClicked() {
mockWindowOpen('http://www.google.com/', '_blank', '');
mockWindowOpen.$replay();
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
goog.testing.events.fireClickSequence(
goog.dom.$(goog.editor.plugins.LinkBubble.TEST_LINK_ID_));
assertBubble();
mockWindowOpen.$verify();
FIELDMOCK.$verify();
}
function testLinkTextClickedCustomUrlFn() {
mockWindowOpen('http://images.google.com/', '_blank', '');
mockWindowOpen.$replay();
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
linkBubble.setTestLinkUrlFn(function(url) {
return url.replace('www', 'images');
});
linkBubble.handleSelectionChange(createMouseEvent(link));
assertBubble();
goog.testing.events.fireClickSequence(
goog.dom.$(goog.editor.plugins.LinkBubble.TEST_LINK_ID_));
assertBubble();
mockWindowOpen.$verify();
FIELDMOCK.$verify();
}
/**
* Urls with invalid schemes shouldn't be linkified.
* @bug 2585360
*/
function testDontLinkifyInvalidScheme() {
mockWindowOpen.$replay();
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
var badLink = document.createElement(goog.dom.TagName.A);
badLink.href = 'javascript:alert(1)';
badLink.innerHTML = 'bad link';
linkBubble.handleSelectionChange(createMouseEvent(badLink));
assertBubble();
// The link shouldn't exist at all
assertNull(goog.dom.$(goog.editor.plugins.LinkBubble.TEST_LINK_ID_));
assertBubble();
mockWindowOpen.$verify();
FIELDMOCK.$verify();
}
function testIsSafeSchemeToOpen() {
// Urls with no scheme at all are ok too since 'http://' will be prepended.
var good = [
'http://google.com', 'http://google.com/', 'https://google.com',
'[email protected]', 'http://www.google.com', 'http://site.com',
'google.com', 'google', 'http://google', 'HTTP://GOOGLE.COM',
'HtTp://www.google.com'
];
var bad = [
'javascript:google.com', 'httpp://google.com', 'data:foo',
'javascript:alert(\'hi\');', 'abc:def'
];
for (var i = 0; i < good.length; i++) {
assertTrue(good[i] + ' should have a safe scheme',
linkBubble.isSafeSchemeToOpen_(good[i]));
}
for (i = 0; i < bad.length; i++) {
assertFalse(bad[i] + ' should have an unsafe scheme',
linkBubble.isSafeSchemeToOpen_(bad[i]));
}
}
function testShouldOpenWithWhitelist() {
linkBubble.setSafeToOpenSchemes(['abc']);
assertTrue('Scheme should be safe',
linkBubble.shouldOpenUrl('abc://google.com'));
assertFalse('Scheme should be unsafe',
linkBubble.shouldOpenUrl('http://google.com'));
linkBubble.setBlockOpeningUnsafeSchemes(false);
assertTrue('Non-whitelisted should now be safe after disabling blocking',
linkBubble.shouldOpenUrl('http://google.com'));
}
/**
* @bug 763211
* @bug 2182147
*/
function testLongUrlTestLinkAnchorTextCorrect() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
var longUrl = 'http://www.reallylonglinkthatshouldbetruncated' +
'becauseitistoolong.com';
var truncatedLongUrl = goog.string.truncateMiddle(longUrl, 48);
var longLink = document.createElement(goog.dom.TagName.A);
longLink.href = longUrl;
longLink.innerHTML = 'Google';
fieldDiv.appendChild(longLink);
linkBubble.handleSelectionChange(createMouseEvent(longLink));
assertBubble();
var testLinkEl = goog.dom.$(goog.editor.plugins.LinkBubble.TEST_LINK_ID_);
assertEquals(
'The test link\'s anchor text should be the truncated URL.',
truncatedLongUrl,
testLinkEl.innerHTML);
fieldDiv.removeChild(longLink);
FIELDMOCK.$verify();
}
/**
* @bug 2416024
*/
function testOverridingCreateBubbleContentsDoesntNpeGetTargetUrl() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
stubs.set(linkBubble, 'createBubbleContents',
function(elem) {
// getTargetUrl would cause an NPE if urlUtil_ wasn't defined yet.
linkBubble.getTargetUrl();
});
assertNotThrows('Accessing this.urlUtil_ should not NPE',
goog.bind(linkBubble.handleSelectionChange,
linkBubble, createMouseEvent(link)));
FIELDMOCK.$verify();
}
/**
* @bug 15379294
*/
function testUpdateLinkCommandDoesNotTriggerAnException() {
FIELDMOCK.$replay();
linkBubble.enable(FIELDMOCK);
// At this point, the bubble was not created yet using its createBubble
// public method.
assertNotThrows(
'Executing goog.editor.Command.UPDATE_LINK_BUBBLE should not trigger ' +
'an exception even if the bubble was not created yet using its ' +
'createBubble method.',
goog.bind(linkBubble.execCommandInternal, linkBubble,
goog.editor.Command.UPDATE_LINK_BUBBLE));
FIELDMOCK.$verify();
}
function assertBubble() {
assertTrue('Link bubble visible', linkBubble.isVisible());
assertNotNull('Link bubble created',
goog.dom.$(goog.editor.plugins.LinkBubble.LINK_DIV_ID_));
}
function assertNoBubble() {
assertFalse('Link bubble not visible', linkBubble.isVisible());
assertNull('Link bubble not created',
goog.dom.$(goog.editor.plugins.LinkBubble.LINK_DIV_ID_));
}
function createMouseEvent(target) {
var eventObj = new goog.events.Event(goog.events.EventType.MOUSEUP, target);
eventObj.button = goog.events.BrowserEvent.MouseButton.LEFT;
return new goog.events.BrowserEvent(eventObj, target);
}<|fim▁end|>
| |
<|file_name|>filter-dropdown-number-range.js<|end_file_name|><|fim▁begin|>import m from 'mithril';
import prop from 'mithril/stream';
import h from '../h';
import _ from 'underscore';
const EnterKey = 13;
const innerFieldInput = {
oninit: function(vnode) {
const inputState = {<|fim▁hole|> setValue: function(value) {
value = (''+value).replace(/[^0-9]*/g, '');
                value = Math.abs(parseInt(value, 10));
inputState.value(value);
}
}
vnode.state = { inputState };
},
view: function({state, attrs}) {
const defaultInputOptions = {
onchange: m.withAttr('value', state.inputState.setValue),
value: state.inputState.value(),
onkeyup: (e) => {
if (e.keyCode == EnterKey)
attrs.onsetValue();
state.inputState.setValue(e.target.value)
}
};
let inputExtraProps = '';
if ('min' in attrs) inputExtraProps += `[min='${attrs.min}']`;
if ('max' in attrs) inputExtraProps += `[max='${attrs.max}']`;
if ('placeholder' in attrs) inputExtraProps += `[placeholder='${attrs.placeholder}']`;
else inputExtraProps += `[placeholder=' ']`;
return attrs.shouldRenderInnerFieldLabel ?
m(`input.text-field.positive.w-input[type='number']${inputExtraProps}`, defaultInputOptions)
:
m('.w-row', [
m('.text-field.positive.prefix.no-hover.w-col.w-col-3.w-col-small-3.w-col-tiny-3',
m('.fontsize-smallest.fontcolor-secondary.u-text-center', attrs.label)
),
m('.w-col.w-col-9.w-col-small-9.w-col-tiny-9',
m(`input.text-field.postfix.positive.w-input[type='number']${inputExtraProps}`, defaultInputOptions)
)
]);
}
}
const filterDropdownNumberRange = {
oninit: function (vnode) {
const
firstValue = prop(0),
secondValue = prop(0),
clearFieldValues = () => { firstValue(0), secondValue(0) },
getNumericValue = (value) => isNaN(value) ? 0 : value,
getLowerValue = () => getNumericValue(firstValue()),
getHigherValue = () => getNumericValue(secondValue()),
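            // Builds the select placeholder by replacing the #V1/#V2 tokens of the
            // configured template with the current bounds (falling back to the
            // initial values when a bound is unset),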
renderPlaceholder = () => {
const
lowerValue = getLowerValue(),
higherValue = getHigherValue();
let placeholder = vnode.attrs.value_change_placeholder;
if (higherValue !== 0) placeholder = vnode.attrs.value_change_both_placeholder;
if (lowerValue !== 0)
{
placeholder = placeholder.replace('#V1', lowerValue);
}
else
{
placeholder = placeholder.replace('#V1', vnode.attrs.init_lower_value);
}
if (higherValue !== 0)
{
placeholder = placeholder.replace('#V2', higherValue);
}
else
{
placeholder = placeholder.replace('#V2', vnode.attrs.init_higher_value);
}
return placeholder;
},
showDropdown = h.toggleProp(false, true);
vnode.state = {
firstValue,
secondValue,
clearFieldValues,
getLowerValue,
getHigherValue,
renderPlaceholder,
showDropdown
};
},
view: function ({state, attrs}) {
const dropdownOptions = {};
const shouldRenderInnerFieldLabel = !!!attrs.inner_field_label;
const applyValueToFilter = () => {
const higherValue = state.getHigherValue() * attrs.value_multiplier;
const lowerValue = state.getLowerValue() * attrs.value_multiplier;
attrs.vm.gte(lowerValue);
attrs.vm.lte(higherValue);
attrs.onapply();
state.showDropdown.toggle();
};
if ('dropdown_inline_style' in attrs) {
dropdownOptions.style = attrs.dropdown_inline_style;
}
return m(attrs.wrapper_class, [
m('.fontsize-smaller.u-text-center', attrs.label),
m('div', {
style: {'z-index' : '1'}
}, [
m('select.w-select.text-field.positive', {
style: {
'margin-bottom' : '0px'
},
onmousedown: function(e) {
e.preventDefault();
if (attrs.selectable() !== attrs.index && state.showDropdown()) state.showDropdown.toggle();
attrs.selectable(attrs.index);
state.showDropdown.toggle();
}
},
[
m('option', {
value: ''
}, state.renderPlaceholder())
]),
((state.showDropdown() && attrs.selectable() == attrs.index) ?
m('nav.dropdown-list.dropdown-list-medium.card', dropdownOptions,
[
m('.u-marginbottom-20.w-row', [
m('.w-col.w-col-5.w-col-small-5.w-col-tiny-5',
m(innerFieldInput, {
shouldRenderInnerFieldLabel,
inputValue: state.firstValue,
placeholder: attrs.inner_field_placeholder,
label: attrs.inner_field_label,
min: attrs.min,
onsetValue: applyValueToFilter
})
),
m('.w-col.w-col-2.w-col-small-2.w-col-tiny-2',
m('.fontsize-smaller.u-text-center.u-margintop-10',
'a'
)
),
m('.w-col.w-col-5.w-col-small-5.w-col-tiny-5',
m(innerFieldInput, {
shouldRenderInnerFieldLabel,
inputValue: state.secondValue,
placeholder: ' ',
label: attrs.inner_field_label,
min: attrs.min,
onsetValue: applyValueToFilter
})
)
]),
m('a.fontsize-smaller.fontweight-semibold.alt-link.u-right[href=\'#\']', {
onclick: applyValueToFilter
}, 'Aplicar'),
m('a.fontsize-smaller.link-hidden[href=\'#\']', {
onclick: () => {
state.clearFieldValues();
applyValueToFilter();
}
}, 'Limpar')
])
: '')
])
]);
}
}
export default filterDropdownNumberRange;<|fim▁end|>
|
value: vnode.attrs.inputValue,
|
<|file_name|>LangevinThermostat.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
********************************************
**espressopp.integrator.LangevinThermostat**
********************************************
.. function:: espressopp.integrator.LangevinThermostat(system)
        :param system: the system object the thermostat is attached to
        :type system: espressopp.System
"""
from espressopp.esutil import cxxinit
from espressopp import pmi
from espressopp.integrator.Extension import *
from _espressopp import integrator_LangevinThermostat
class LangevinThermostatLocal(ExtensionLocal, integrator_LangevinThermostat):
def __init__(self, system):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
cxxinit(self, integrator_LangevinThermostat, system)
#def enableAdress(self):
# if pmi.workerIsActive():
# self.cxxclass.enableAdress(self);
if pmi.isController :<|fim▁hole|> pmiproperty = [ 'gamma', 'temperature', 'adress' ]
)<|fim▁end|>
|
class LangevinThermostat(Extension):
__metaclass__ = pmi.Proxy
pmiproxydefs = dict(
cls = 'espressopp.integrator.LangevinThermostatLocal',
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
#lib/ontologies/com/usefulinc/ns
|
<|file_name|>CharSource.java<|end_file_name|><|fim▁begin|>// This Source Code is in the Public Domain per: http://unlicense.org
package org.litesoft.commonfoundation.charstreams;
/**
* A CharSource is like a powerful version of a "char" based Iterator.
*/
public interface CharSource {
/**
* Report if there are any more characters available to get().
* <p/>
* Similar to Iterator's hasNext().
*/
public boolean anyRemaining();
/**
* Get the next character (consume it from the stream) or -1 if there are no more characters available.
*/
public int get();
/**
* Get the next character (consume it from the stream) or throw an exception if there are no more characters available.
*/
public char getRequired();
/**
* Return the next character (without consuming it) or -1 if there are no more characters available.
*/
public int peek();
/**
* Return the Next Offset (from the stream) that the peek/get/getRequired would read from (it may be beyond the stream end).
*/
public int getNextOffset();
/**
* Return the Last Offset (from the stream), which the previous get/getRequired read from (it may be -1 if stream has not been successfully read from).
*/
public int getLastOffset();
/**
     * Return a string (and consume the characters) from the current position up to (but not including) the position of the 'c' character, or "" if 'c' is not found (nothing consumed).
*/
public String getUpTo( char c );
/**
* Consume all the spaces (NOT white space) until either there are no more characters or a non space is encountered (NOT consumed).
*
* @return true if there are more characters.
*/
public boolean consumeSpaces();
/**
     * Return a string (and consume the characters) from the current position through the end of the characters, or up to (but not including) a character that is not a visible 7-bit ascii character (' ' < c <= 126).
*/<|fim▁hole|> /**
* Consume all the non-visible 7-bit ascii characters (visible c == ' ' < c <= 126) until either there are no more characters or a visible 7-bit ascii character is encountered (NOT consumed).
*
* @return true if there are more characters.
*/
public boolean consumeNonVisible7BitAscii();
}<|fim▁end|>
|
public String getUpToNonVisible7BitAscii();
|
<|file_name|>AceEditorPreview.java<|end_file_name|><|fim▁begin|>/*
* AceEditorPreview.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.prefs.views;
import com.google.gwt.dom.client.*;
import com.google.gwt.dom.client.Style.BorderStyle;
import com.google.gwt.dom.client.Style.Unit;
import org.rstudio.core.client.ExternalJavaScriptLoader;
import org.rstudio.core.client.ExternalJavaScriptLoader.Callback;
import org.rstudio.core.client.theme.ThemeFonts;
import org.rstudio.core.client.widget.DynamicIFrame;
import org.rstudio.core.client.widget.FontSizer;
import org.rstudio.studio.client.workbench.views.source.editors.text.ace.AceResources;
public class AceEditorPreview extends DynamicIFrame
{
public AceEditorPreview(String code)
{
code_ = code;
Style style = getStyleElement().getStyle();
style.setBorderColor("#CCC");
style.setBorderWidth(1, Unit.PX);
style.setBorderStyle(BorderStyle.SOLID);
}
@Override
protected void onFrameLoaded()
{
isFrameLoaded_ = true;
if (initialThemeUrl_ != null)
setTheme(initialThemeUrl_);
if (initialFontSize_ != null)
setFontSize(initialFontSize_);
final Document doc = getDocument();
final BodyElement body = doc.getBody();
body.getStyle().setMargin(0, Unit.PX);
body.getStyle().setBackgroundColor("white");
StyleElement style = doc.createStyleElement();
style.setType("text/css");
style.setInnerText(
".ace_editor {\n" +
"border: none !important;\n" +
"}");
setFont(ThemeFonts.getFixedWidthFont());
body.appendChild(style);
DivElement div = doc.createDivElement();
div.setId("editor");
div.getStyle().setWidth(100, Unit.PCT);
div.getStyle().setHeight(100, Unit.PCT);
div.setInnerText(code_);
body.appendChild(div);
FontSizer.injectStylesIntoDocument(doc);
FontSizer.applyNormalFontSize(div);
new ExternalJavaScriptLoader(doc, AceResources.INSTANCE.acejs().getSafeUri().asString())
.addCallback(new Callback()
{
public void onLoaded()
{
new ExternalJavaScriptLoader(doc, AceResources.INSTANCE.acesupportjs().getSafeUri().asString())
.addCallback(new Callback()
{
public void onLoaded()
{
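                     // Both ace.js and its support bundle are loaded; boot a
                     // minimal, chrome-less editor with the R mode in the iframe.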
body.appendChild(doc.createScriptElement(
"var editor = ace.edit('editor');\n" +
"editor.renderer.setHScrollBarAlwaysVisible(false);\n" +
"editor.renderer.setTheme({});\n" +
"editor.setHighlightActiveLine(false);\n" +
"editor.renderer.setShowGutter(false);\n" +
"var RMode = require('mode/r').Mode;\n" +
"editor.getSession().setMode(new RMode(false, editor.getSession().getDocument()));"));
}
});
}
});
}
public void setTheme(String themeUrl)
{
if (!isFrameLoaded_)
{
initialThemeUrl_ = themeUrl;
return;
}<|fim▁hole|>
Document doc = getDocument();
currentStyleLink_ = doc.createLinkElement();
currentStyleLink_.setRel("stylesheet");
currentStyleLink_.setType("text/css");
currentStyleLink_.setHref(themeUrl);
doc.getBody().appendChild(currentStyleLink_);
}
public void setFontSize(double fontSize)
{
if (!isFrameLoaded_)
{
initialFontSize_ = fontSize;
return;
}
FontSizer.setNormalFontSize(getDocument(), fontSize);
}
public void setFont(String font)
{
final String STYLE_EL_ID = "__rstudio_font_family";
Document document = getDocument();
Element oldStyle = document.getElementById(STYLE_EL_ID);
StyleElement style = document.createStyleElement();
style.setAttribute("type", "text/css");
style.setInnerText(".ace_editor, .ace_text-layer {\n" +
"font-family: " + font + " !important;\n" +
"}");
document.getBody().appendChild(style);
if (oldStyle != null)
oldStyle.removeFromParent();
style.setId(STYLE_EL_ID);
}
private LinkElement currentStyleLink_;
private boolean isFrameLoaded_;
private String initialThemeUrl_;
private Double initialFontSize_;
private final String code_;
}<|fim▁end|>
|
if (currentStyleLink_ != null)
currentStyleLink_.removeFromParent();
|
<|file_name|>sync_normal.hpp<|end_file_name|><|fim▁begin|>#ifndef BOOST_NETWORK_PROTOCOL_HTTP_IMPL_HTTP_SYNC_CONNECTION_20100601
#define BOOST_NETWORK_PROTOCOL_HTTP_IMPL_HTTP_SYNC_CONNECTION_20100601
// Copyright 2013 Google, Inc.
// Copyright 2010 (C) Dean Michael Berris
// Copyright 2010 (C) Sinefunc, Inc.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <iterator>
#include <functional>
#include <asio/deadline_timer.hpp>
#include <asio/streambuf.hpp>
#include <boost/network/protocol/http/algorithms/linearize.hpp>
#include <boost/network/protocol/http/response.hpp>
#include <boost/network/protocol/http/traits/resolver_policy.hpp>
#include <boost/network/traits/string.hpp>
namespace boost {
namespace network {
namespace http {
namespace impl {
template <class Tag, unsigned version_major, unsigned version_minor>
struct sync_connection_base_impl;
template <class Tag, unsigned version_major, unsigned version_minor>
struct sync_connection_base;
template <class Tag, unsigned version_major, unsigned version_minor>
struct http_sync_connection
: public virtual sync_connection_base<Tag, version_major, version_minor>,
sync_connection_base_impl<Tag, version_major, version_minor>,
std::enable_shared_from_this<<|fim▁hole|> typedef typename string<Tag>::type string_type;
typedef std::function<typename resolver_base::resolver_iterator_pair(
resolver_type&, string_type const&, string_type const&)>
resolver_function_type;
typedef http_sync_connection<Tag, version_major, version_minor> this_type;
typedef sync_connection_base_impl<Tag, version_major, version_minor>
connection_base;
typedef std::function<bool(string_type&)> body_generator_function_type;
http_sync_connection(resolver_type& resolver, resolver_function_type resolve,
int timeout)
: connection_base(),
timeout_(timeout),
timer_(resolver.get_io_service()),
resolver_(resolver),
resolve_(std::move(resolve)),
socket_(resolver.get_io_service()) {}
void init_socket(string_type const& hostname, string_type const& port) {
connection_base::init_socket(socket_, resolver_, hostname, port, resolve_);
}
void send_request_impl(string_type const& method,
basic_request<Tag> const& request_,
body_generator_function_type generator) {
asio::streambuf request_buffer;
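    // Serialize the request line and headers, then stream any
    // generator-supplied body chunks before arming the deadline timer.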
linearize(
request_, method, version_major, version_minor,
std::ostreambuf_iterator<typename char_<Tag>::type>(&request_buffer));
connection_base::send_request_impl(socket_, method, request_buffer);
if (generator) {
string_type chunk;
while (generator(chunk)) {
std::copy(chunk.begin(), chunk.end(),
std::ostreambuf_iterator<typename char_<Tag>::type>(
&request_buffer));
chunk.clear();
connection_base::send_request_impl(socket_, method, request_buffer);
}
}
if (timeout_ > 0) {
timer_.expires_from_now(boost::posix_time::seconds(timeout_));
auto self = this->shared_from_this();
timer_.async_wait([=] (std::error_code const &ec) {
self->handle_timeout(ec);
});
}
}
void read_status(basic_response<Tag>& response_,
asio::streambuf& response_buffer) {
connection_base::read_status(socket_, response_, response_buffer);
}
void read_headers(basic_response<Tag>& response,
asio::streambuf& response_buffer) {
connection_base::read_headers(socket_, response, response_buffer);
}
void read_body(basic_response<Tag>& response_,
asio::streambuf& response_buffer) {
connection_base::read_body(socket_, response_, response_buffer);
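    // Connection handling: HTTP/1.1 closes only on an explicit
    // "Connection: close" header, while HTTP/1.0 always closes.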
typename headers_range<basic_response<Tag> >::type connection_range =
headers(response_)["Connection"];
if (version_major == 1 && version_minor == 1 &&
!boost::empty(connection_range) &&
boost::iequals(std::begin(connection_range)->second, "close")) {
close_socket();
} else if (version_major == 1 && version_minor == 0) {
close_socket();
}
}
bool is_open() { return socket_.is_open(); }
void close_socket() {
timer_.cancel();
if (!is_open()) {
return;
}
std::error_code ignored;
socket_.shutdown(asio::ip::tcp::socket::shutdown_both, ignored);
if (ignored) {
return;
}
socket_.close(ignored);
}
private:
void handle_timeout(std::error_code const& ec) {
if (!ec) {
close_socket();
}
}
int timeout_;
asio::deadline_timer timer_;
resolver_type& resolver_;
resolver_function_type resolve_;
asio::ip::tcp::socket socket_;
};
} // namespace impl
} // namespace http
} // namespace network
} // namespace boost
#endif // BOOST_NETWORK_PROTOCOL_HTTP_IMPL_HTTP_SYNC_CONNECTION_20100<|fim▁end|>
|
http_sync_connection<Tag, version_major, version_minor> > {
typedef typename resolver_policy<Tag>::type resolver_base;
typedef typename resolver_base::resolver_type resolver_type;
|
<|file_name|>admin.js<|end_file_name|><|fim▁begin|>//// Fetches all products using getJSON
//and gets back an array of all products.
//When done it calls the function showArtTab,
//which renders all products in a table.
function getAllAdminProducts()
{
$.getJSON("index2.php/getAllProducts").done(showArtTab);
}
//The function showArtTab receives an array from getAllAdminProducts,
//loops through it and uses it to build a table listing all the
//different products.
function showArtTab(cart){
var mainTabell = document.createElement('div');
mainTabell.setAttribute('id', 'mainTabell');
var tbl = document.createElement('table');
tbl.setAttribute('border', '1');
var tr = document.createElement('tr');
var th2 = document.createElement('th');
var txt2 = document.createTextNode('Produktid');
var th3 = document.createElement('th');
var txt3 = document.createTextNode('Produktnamn');
var th4 = document.createElement('th');
var txt4 = document.createTextNode('Kategori');
var th5 = document.createElement('th');
var txt5 = document.createTextNode('Pris');
var th6 = document.createElement('th');
var txt6 = document.createTextNode('Bild');
var th7 = document.createElement('th');
var txt7 = document.createTextNode('Delete');
var th8 = document.createElement('th');
var txt8 = document.createTextNode('Update');
th2.appendChild(txt2);
tr.appendChild(th2);
th3.appendChild(txt3);
tr.appendChild(th3);
th4.appendChild(txt4);
tr.appendChild(th4);
th5.appendChild(txt5);
tr.appendChild(th5);
th6.appendChild(txt6);
tr.appendChild(th6);
th7.appendChild(txt7);
tr.appendChild(th7);
th8.appendChild(txt8);
tr.appendChild(th8);
tbl.appendChild(tr);
var i = 0;
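//Note: the do-while below assumes the cart array contains at least one product.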
do{
var row = tbl.insertRow(-1);
row.insertCell(-1).innerHTML = cart[i].produktid;
var cell2 = row.insertCell(-1);
cell2.innerHTML = cart[i].namn;
var cell3 = row.insertCell(-1);
cell3.innerHTML = cart[i].kategori;
var cell4 = row.insertCell(-1);
cell4.innerHTML = cart[i].pris;
var cell6 = row.insertCell(-1);
cell6.innerHTML = '<img src="' + cart[i].img + '" height="70" width="70"/>';
var cell7 = row.insertCell(-1);
cell7.innerHTML = "<a href='#' onclick='removeArt(\"" +cart[i].produktid+ "\",\""+ "\");'>Remove</a>";
var cell8 = row.insertCell(-1);
cell8.innerHTML = "<a href='#' onclick='getUpdate(\"" +cart[i].produktid+ "\",\""+ "\");'>Update</a>";
tbl.appendChild(row);
i++;
}while(i< cart.length);
$('#main').html(tbl);
}
//Opens a dialog when the "Add Article" button is pressed,
//showing the contents of the div with id addArt.
function showAddArt(){
$('#addArt').dialog({
show:'fade', position:'center'
});
}
//When the "Add Article" button in the dialog is pressed, it submits the data
//from the form; .serialize collects the values from the text fields.
function addArticle(){
$.post('index2.php/AdminController/addProduct',$('#addArtForm').serialize()).done(getAllAdminProducts);
$("#addArt").dialog('close');
}
//Deletes a product by sending its id in a request to the model.
function deleteArt(prodid)
{
$.getJSON("index2.php/AdminController/deleteProduct/"+prodid);
}
function removeArt(prodid){
    var r = confirm("Do you want to delete this product?");<|fim▁hole|>
    x = "NO";
}
    if(x === "YES"){
        deleteArt(prodid).done(getAllAdminProducts);
}
}
// Takes the product id
// and uses it when calling getArt.
function getUpdate(prodid){
getArt(prodid);
}
// Receives a product id and uses the model to fetch
// the product with that id.
function getArt(prodid){
$.getJSON("index2.php/Controller/getProdById/"+prodid).done(showArt);
}
// Shows a dialog whose text fields are pre-filled with data
// from the record to be updated.
function showArt(data){
    $('#updateId').val(data[0].produktid);
    $('#updateNamn').val(data[0].namn);
    $('#updateKategori').val(data[0].kategori);
    $('#updatePris').val(data[0].pris);
    $('#updateImg').val(data[0].img);
$('#update').dialog({
show:'fade', position:'center',
});
}
// When the update button is clicked, the form data is collected with
// .serialize and sent to the model,
// then the dialog is closed.
function updateArt(){
$.post("index2.php/AdminController/updateProduct/", $('#updateForm').serialize()).done(getAllAdminProducts);
$("#update").dialog('close');
}<|fim▁end|>
|
var x;
if (r === true)
{
    x = "YES";
} else {
|
<|file_name|>feeds.js<|end_file_name|><|fim▁begin|>define([], () => {
'use strict';
class FeedsError extends Error {
constructor(...args) {
console.error('FeedsError', args);
            super(...args);
}
}
class ServerError extends Error {
constructor(...args) {
            super(...args);
}
}
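    // Serializes a Map of query parameters into a URL-encoded query string.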
function queryToString(query) {
return Array.from(query.entries()).map(([k, v]) => {
return [
k, encodeURIComponent(v)
].join('=');
}).join('&');
}
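    // Thin fetch-based client for the feeds service JSON API; `params` is
    // expected to provide the service `url` and an authorization `token`.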
class FeedsClient {
constructor(params) {
this.params = params;
}
put(path, body) {
const url = (this.baseURLPath().concat(path)).join('/');
return fetch(url, {
headers: {
Authorization: this.params.token,
Accept: 'application/json',
'Content-Type': 'application/json'
},
mode: 'cors',
method: 'PUT',
body: JSON.stringify(body)
})
.then((response) => {
if (response.status === 500) {
switch (response.headers.get('Content-Type')) {
case 'application/json':
return response.json()
.then((result) => {
throw new FeedsError(result);
});
case 'text/plain':
return response.text()
.then((errorText) => {
throw new ServerError(errorText);
});
default:
throw new Error('Unexpected content type: ' + response.headers.get('Content-Type'));
}
} else if (response.status !== 200) {
throw new Error('Unexpected response: ' + response.status + ' : ' + response.statusText);
} else {
return response.json()
.then((result) => {
return result;
});
}
});
}
post(path, body) {
const url = (this.baseURLPath().concat(path)).join('/');
return fetch(url, {
headers: {
Authorization: this.params.token,
Accept: 'application/json',
'Content-Type': 'application/json'
},
mode: 'cors',
method: 'POST',
body: body ? JSON.stringify(body) : ''
})
.then((response) => {
if (response.status === 500) {
switch (response.headers.get('Content-Type')) {
case 'application/json':
return response.json()
.then((result) => {
throw new FeedsError(result);
});
case 'text/plain':
return response.text()
.then((errorText) => {
throw new ServerError(errorText);
});
default:
throw new Error('Unexpected content type: ' + response.headers.get('Content-Type'));
}
} else if (response.status === 200) {
return response.json();
} else if (response.status === 204) {
return null;
} else {
throw new Error('Unexpected response: ' + response.status + ' : ' + response.statusText);
}
});
}
postWithResult(path, body) {
const url = (this.baseURLPath().concat(path)).join('/');
return fetch(url, {
headers: {
Authorization: this.params.token,
Accept: 'application/json',
'Content-Type': 'application/json'
},
mode: 'cors',
method: 'POST',
body: body ? JSON.stringify(body) : ''
})
.then((response) => {
if (response.status === 500) {
switch (response.headers.get('Content-Type')) {
case 'application/json':
return response.json()
.then((result) => {
throw new FeedsError(result);
});
case 'text/plain':
return response.text()
.then((errorText) => {
throw new ServerError(errorText);
});
default:
throw new Error('Unexpected content type: ' + response.headers.get('Content-Type'));
}
} else if (response.status === 200) {
return response.json();
} else {
throw new Error('Unexpected response: ' + response.status + ' : ' + response.statusText);
}
});
}
makeUrl(path, query) {
const baseUrl = (this.baseURLPath().concat(path)).join('/');
if (query) {
return baseUrl +
'?' +
queryToString(query);
}
return baseUrl;
}
get(path, query) {
const url = this.makeUrl(path, query);
return fetch(url, {
headers: {
Authorization: this.params.token,
Accept: 'application/json',
'Content-Type': 'application/json'
},<|fim▁hole|> if (response.status === 500) {
switch (response.headers.get('Content-Type')) {
case 'application/json':
return response.json()
.then((result) => {
throw new FeedsError(result);
});
case 'text/plain':
return response.text()
.then((errorText) => {
throw new ServerError(errorText);
});
default:
throw new Error('Unexpected content type: ' + response.headers.get('Content-Type'));
}
} else if (response.status === 200) {
return response.json();
} else {
throw new Error('Unexpected response: ' + response.status + ' : ' + response.statusText);
}
});
}
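        // All endpoints are rooted at <service-url>/api/V1 (see baseURLPath below).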
baseURLPath() {
return [this.params.url, 'api', 'V1'];
}
getNotifications({ count = 100 } = {}) {
const options = new Map();
options.set('n', String(count));
return this.get(['notifications'], options);
}
getUnseenNotificationCount() {
return this.get(['notifications', 'unseen_count']);
}
seeNotifications(param) {
return this.postWithResult(['notifications', 'see'], param);
}
}
return { FeedsClient, FeedsError, ServerError };
});<|fim▁end|>
|
mode: 'cors',
method: 'GET'
})
.then((response) => {
|
<|file_name|>physical_z_tag_test.py<|end_file_name|><|fim▁begin|># Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cirq
import cirq_google
def test_equality():
assert cirq_google.PhysicalZTag() == cirq_google.PhysicalZTag()
assert hash(cirq_google.PhysicalZTag()) == hash(cirq_google.PhysicalZTag())
def test_syc_str_repr():
assert str(cirq_google.PhysicalZTag()) == 'PhysicalZTag()'
assert repr(cirq_google.PhysicalZTag()) == 'cirq_google.PhysicalZTag()'<|fim▁hole|> cirq_google.PhysicalZTag(), setup_code=('import cirq\nimport cirq_google\n')
)<|fim▁end|>
|
cirq.testing.assert_equivalent_repr(
|
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>import ddt
from django.contrib.auth import login, authenticate
from importlib import import_module
from django_lti_tool_provider import AbstractApplicationHookManager
from mock import patch, Mock
from oauth2 import Request, Consumer, SignatureMethod_HMAC_SHA1
from django.contrib.auth.models import User
from django.test.utils import override_settings
from django.test import Client, TestCase, RequestFactory
from django.conf import settings
from django_lti_tool_provider.models import LtiUserData
from django_lti_tool_provider.views import LTIView
@override_settings(
LTI_CLIENT_KEY='qertyuiop1234567890!@#$%^&*()_+[];',
LTI_CLIENT_SECRET='1234567890!@#$%^&*()_+[];./,;qwertyuiop'
)
class LtiRequestsTestBase(TestCase):
_data = {
"lis_result_sourcedid": "lis_result_sourcedid",
"context_id": "LTIX/LTI-101/now",
"user_id": "1234567890",
"roles": ["Student"],
"lis_outcome_service_url": "lis_outcome_service_url",
"resource_link_id": "resource_link_id",
"lti_version": "LTI-1p0",
'lis_person_sourcedid': 'username',
'lis_person_contact_email_primary': '[email protected]'
}
_url_base = 'http://testserver'
DEFAULT_REDIRECT = '/home'
def setUp(self):
self.client = Client()
self.hook_manager = Mock(spec=AbstractApplicationHookManager)
self.hook_manager.vary_by_key = Mock(return_value=None)
self.hook_manager.optional_lti_parameters = Mock(return_value={})
LTIView.register_authentication_manager(self.hook_manager)
@property
def consumer(self):
return Consumer(settings.LTI_CLIENT_KEY, settings.LTI_CLIENT_SECRET)
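    # Builds an OAuth1-signed LTI launch request using the client key/secret from settings.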
def _get_signed_oauth_request(self, path, method, data=None):
data = data if data is not None else self._data
url = self._url_base + path
method = method if method else 'GET'
req = Request.from_consumer_and_token(self.consumer, {}, method, url, data)
req.sign_request(SignatureMethod_HMAC_SHA1(), self.consumer, None)
return req
def get_correct_lti_payload(self, path='/lti/', method='POST', data=None):
req = self._get_signed_oauth_request(path, method, data)
return req.to_postdata()
def get_incorrect_lti_payload(self, path='/lti/', method='POST', data=None):
req = self._get_signed_oauth_request(path, method, data)
req['oauth_signature'] += '_broken'
return req.to_postdata()
def send_lti_request(self, payload, client=None):
client = client or self.client
return client.post('/lti/', payload, content_type='application/x-www-form-urlencoded')
def _authenticate(self, username='test'):
self.client = Client()
user = User.objects.get(username=username)
logged_in = self.client.login(username=username, password='test')
self.assertTrue(logged_in)
return user
def _logout(self):
self.client.logout()
def _verify_redirected_to(self, response, expected_url):
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, expected_url)
def _verify_session_lti_contents(self, session, expected):
self.assertIn('lti_parameters', session)
self._verify_lti_data(session['lti_parameters'], expected)
def _verify_lti_data(self, actual, expected):
for key, value in expected.items():
self.assertEqual(value, actual[key])
def _verify_lti_created(self, user, expected_lti_data, custom_key=None):
key = custom_key if custom_key else ''
lti_data = LtiUserData.objects.get(user=user, custom_key=key)
self.assertIsNotNone(lti_data)
self.assertEqual(lti_data.custom_key, key)
for key, value in expected_lti_data.items():
self.assertEqual(value, lti_data.edx_lti_parameters[key])
class AnonymousLtiRequestTests(LtiRequestsTestBase):
def setUp(self):
super(AnonymousLtiRequestTests, self).setUp()
self.hook_manager.anonymous_redirect_to = Mock(return_value=self.DEFAULT_REDIRECT)
def test_given_incorrect_payload_throws_bad_request(self):
response = self.send_lti_request(self.get_incorrect_lti_payload())
self.assertEqual(response.status_code, 400)
self.assertIn("Invalid LTI Request", response.content)
def test_given_correct_requests_sets_session_variable(self):
response = self.send_lti_request(self.get_correct_lti_payload())
self._verify_redirected_to(response, self.DEFAULT_REDIRECT)
self._verify_session_lti_contents(self.client.session, self._data)
@ddt.ddt
@patch('django_lti_tool_provider.views.Signals.LTI.received.send')
class AuthenticatedLtiRequestTests(LtiRequestsTestBase):
def _authentication_hook(self, request, user_id=None, username=None, email=None, **kwargs):
user = User.objects.create_user(username or user_id, password='1234', email=email)
user.save()
authenticated_user = authenticate(request, username=user.username, password='1234')
login(request, authenticated_user)
return user
def setUp(self):
super(AuthenticatedLtiRequestTests, self).setUp()
self.hook_manager.authenticated_redirect_to = Mock(return_value=self.DEFAULT_REDIRECT)
self.hook_manager.authentication_hook = self._authentication_hook
def _verify_lti_updated_signal_is_sent(self, patched_send_lti_received, expected_user):
expected_lti_data = LtiUserData.objects.get(user=expected_user)
patched_send_lti_received.assert_called_once_with(LTIView, user=expected_user, lti_data=expected_lti_data)
def test_no_session_given_incorrect_payload_throws_bad_request(self, _):
response = self.send_lti_request(self.get_incorrect_lti_payload())
self.assertEqual(response.status_code, 400)
self.assertIn("Invalid LTI Request", response.content)
def test_no_session_correct_payload_processes_lti_request(self, patched_send_lti_received):
# Precondition check
self.assertFalse(LtiUserData.objects.all())
response = self.send_lti_request(self.get_correct_lti_payload())
# Should have been created.
user = User.objects.all()[0]
self._verify_lti_created(user, self._data)
self._verify_redirected_to(response, self.DEFAULT_REDIRECT)
self._verify_lti_updated_signal_is_sent(patched_send_lti_received, user)
def test_given_session_and_lti_uses_lti(self, patched_send_lti_received):
# Precondition check
self.assertFalse(LtiUserData.objects.all())
session = self.client.session
session['lti_parameters'] = {}
session.save()
response = self.send_lti_request(self.get_correct_lti_payload())
# Should have been created.
user = User.objects.all()[0]
self._verify_lti_created(user, self._data)
self._verify_redirected_to(response, self.DEFAULT_REDIRECT)
self._verify_lti_updated_signal_is_sent(patched_send_lti_received, user)
def test_force_login_change(self, patched_send_lti_received):
self.assertFalse(User.objects.exclude(id=1))
payload = self.get_correct_lti_payload()
request = self.send_lti_request(payload, client=RequestFactory())
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
request.user = None
user = self._authentication_hook(request, username='goober')
request.session.save()
self.assertEqual(request.user, user)
LTIView.as_view()(request)
# New user creation not actually available during tests.
self.assertTrue(request.user)
new_user = User.objects.exclude(username='goober')[0]
self.assertEqual(request.user, new_user)
# Verify a new user is not created with the same data if re-visiting.
request = self.send_lti_request(payload, client=RequestFactory())
request.session = engine.SessionStore()
request.user = None
authenticated_user = authenticate(request, username=new_user.username, password='1234')
self.assertTrue(authenticated_user)
login(request, authenticated_user)
LTIView.as_view()(request)
self.assertEqual(request.user, authenticated_user)
self.assertEqual(authenticated_user, new_user)
self.assertEqual(LtiUserData.objects.all().count(), 1)
@ddt.ddt
class AuthenticationManagerIntegrationTests(LtiRequestsTestBase):
TEST_URLS = "/some_url", "/some_other_url", "http://qwe.asd.zxc.com"
def setUp(self):
super(AuthenticationManagerIntegrationTests, self).setUp()
def tearDown(self):
LTIView.authentication_manager = None
self._logout()
def _authenticate_user(self, request, user_id=None, username=None, email=None, **kwargs):
if not username:
username = "test_username"
password = "test_password"
user = User.objects.create_user(username=username, email=email, password=password)
authenticated_user = authenticate(request, username=username, password=password)
login(request, authenticated_user)
self.addCleanup(lambda: user.delete())
def test_authentication_hook_executed_if_not_authenticated(self):
payload = self.get_correct_lti_payload()
self.send_lti_request(payload)
args, user_data = self.hook_manager.authentication_hook.call_args
request = args[0]
self.assertEqual(request.body, payload)
self.assertFalse(request.user.is_authenticated)
expected_user_data = {
'username': self._data['lis_person_sourcedid'],
'email': self._data['lis_person_contact_email_primary'],
'user_id': self._data['user_id'],
'extra_params': {}
}
self.assertEqual(user_data, expected_user_data)
def test_authentication_hook_passes_optional_lti_data(self):
payload = self.get_correct_lti_payload()
self.hook_manager.optional_lti_parameters.return_value = {'resource_link_id': 'link_id', 'roles': 'roles'}
self.send_lti_request(payload)
args, user_data = self.hook_manager.authentication_hook.call_args
request = args[0]
self.assertEqual(request.body, payload)
self.assertFalse(request.user.is_authenticated)<|fim▁hole|> 'user_id': self._data['user_id'],
'extra_params': {
'roles': ['Student'],
'link_id': 'resource_link_id',
}
}
self.assertEqual(user_data, expected_user_data)
@ddt.data(*TEST_URLS)
def test_anonymous_lti_is_processed_if_hook_does_not_authenticate_user(self, expected_url):
self.hook_manager.anonymous_redirect_to.return_value = expected_url
response = self.send_lti_request(self.get_correct_lti_payload())
self._verify_redirected_to(response, expected_url)
self._verify_session_lti_contents(self.client.session, self._data)
# verifying correct parameters were passed to auth manager hook
request, lti_data = self.hook_manager.anonymous_redirect_to.call_args[0]
self._verify_session_lti_contents(request.session, self._data)
self._verify_lti_data(lti_data, self._data)
@ddt.data(*TEST_URLS)
def test_authenticated_lti_is_processed_if_hook_authenticates_user(self, expected_url):
self.hook_manager.authentication_hook.side_effect = self._authenticate_user
self.hook_manager.authenticated_redirect_to.return_value = expected_url
response = self.send_lti_request(self.get_correct_lti_payload())
self._verify_redirected_to(response, expected_url)
# verifying correct parameters were passed to auth manager hook
request, lti_data = self.hook_manager.authenticated_redirect_to.call_args[0]
user = request.user
self._verify_lti_created(user, self._data)
self._verify_lti_data(lti_data, self._data)
@ddt.data('custom', 'very custom', 'extremely custom')
def test_authenticated_lti_saves_custom_key_if_specified(self, key):
self.hook_manager.vary_by_key.return_value = key
self.hook_manager.authentication_hook.side_effect = self._authenticate_user
self.send_lti_request(self.get_correct_lti_payload())
request, lti_data = self.hook_manager.authenticated_redirect_to.call_args[0]
user = request.user
self._verify_lti_created(user, self._data, key)<|fim▁end|>
|
expected_user_data = {
'username': self._data['lis_person_sourcedid'],
'email': self._data['lis_person_contact_email_primary'],
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import sys
import traceback
import logging
import time
import inspect
def run_resilient(function, function_args=[], function_kwargs={}, tolerated_errors=(Exception,), log_prefix='Something failed, tolerating error and retrying: ', retries=5, delay=True, critical=False, initial_delay_time=0.1, delay_multiplier = 2.0):
"""Run the function with function_args and function_kwargs. Warn if it excepts, and retry. If retries are exhausted, <|fim▁hole|> def show_exception_info(log_prefix):
"""Warn about an exception with a lower priority message, with a text prefix and the error type"""
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
traceback_text = lines[2]
logging.info(log_prefix + traceback_text)
return
delay_time = initial_delay_time
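    # Retry loop: call the function, tolerating the listed errors and optionally
    # backing off exponentially between attempts.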
while retries:
retries -= 1
try:
return function(*function_args, **function_kwargs)
        except tolerated_errors as error:  # IGNORE:W0703
# One of our anticipated errors happened.
if retries:
# We've got more retries left. Log the error, and continue.
show_exception_info(log_prefix)
                if delay:
                    logging.info('We have %d tries left. Delaying for %.2f seconds and trying again.',
                                 retries, delay_time)
                    time.sleep(delay_time)
                    delay_time = delay_time * delay_multiplier
                else:
                    logging.info('We have %d tries left. Trying again.', retries)
else:
                logging.warning('Could not complete action after %d retries.', retries)
if critical:
logging.error('Critical action failed.')
raise error
except Exception:
            # We've received an error we didn't anticipate. This is bad.
            # Depending on the error, the developers should either fix something or, if we want to
            # tolerate it, add it to our tolerated_errors.
            # Those things require human judgement, so we'll raise the exception.
            logging.exception('Unanticipated error received!')  # Log the exception
raise #Re-raise
except:
typ, value, unused = sys.exc_info()
# We've received an exception that isn't even an Exception subclass!
# This is bad manners - see http://docs.python.org/tutorial/errors.html:
# "Exceptions should typically be derived from the Exception class, either directly or indirectly."
logging.exception("Bad mannered exception. Class was: %s Value was: %s Source file: %s", typ.__name__, str(value), inspect.getsourcefile(typ))
raise<|fim▁end|>
|
log that, and if it's critical, properly throw the exception """
|
<|file_name|>premultiplyfx.cpp<|end_file_name|><|fim▁begin|>#include "stdfx.h"
//#include "tfxparam.h"
#include "trop.h"
//===================================================================
class PremultiplyFx : public TStandardRasterFx {
FX_PLUGIN_DECLARATION(PremultiplyFx)
TRasterFxPort m_input;
public:
PremultiplyFx() { addInputPort("Source", m_input); }
~PremultiplyFx(){};
bool doGetBBox(double frame, TRectD &bBox, const TRenderSettings &info) {
if (m_input.isConnected())
return m_input->doGetBBox(frame, bBox, info);
else {
bBox = TRectD();
return false;<|fim▁hole|>
void doCompute(TTile &tile, double frame, const TRenderSettings &ri);
bool canHandle(const TRenderSettings &info, double frame) { return true; }
};
//------------------------------------------------------------------------------
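// Computes the upstream tile, then premultiplies its raster in place.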
void PremultiplyFx::doCompute(TTile &tile, double frame,
const TRenderSettings &ri) {
if (!m_input.isConnected()) return;
m_input->compute(tile, frame, ri);
TRop::premultiply(tile.getRaster());
}
FX_PLUGIN_IDENTIFIER(PremultiplyFx, "premultiplyFx");<|fim▁end|>
|
}
}
|
<|file_name|>Coordinates.py<|end_file_name|><|fim▁begin|>class Coordinates:
""" WhiteSource model for artifact's coordinates. """
def __init__(self, group_id, artifact_id, version_id):
self.groupId = group_id
self.artifactId = artifact_id
self.versionId = version_id
def create_project_coordinates(distribution):
""" Creates a 'Coordinates' instance for the user package"""<|fim▁hole|> dist_version = distribution.get_version()
coordinates = Coordinates(group_id=None, artifact_id=dist_name, version_id=dist_version)
return coordinates<|fim▁end|>
|
dist_name = distribution.get_name()
|