prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>Enemy.java<|end_file_name|><|fim▁begin|>package com.blogspot.ludumdaresforfun;
import com.badlogic.gdx.graphics.g2d.Animation;
import com.badlogic.gdx.graphics.g2d.TextureAtlas.AtlasRegion;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.scenes.scene2d.utils.TextureRegionDrawable;
public class Enemy extends Image{
final float VELOCITY = 50f;
final float ATTACK_VELOCITY = 130f;
enum State {
Walking, Running, Hurting, BeingInvoked
}
Vector2 desiredPosition = new Vector2();
final Vector2 velocity = new Vector2();
State state = State.Walking;
boolean facesRight = false;
public boolean updateVelocity;
public boolean setToDie = false;
public boolean running = false;
public float attackHereX = 0;
public boolean attackRight = false;
public enum Direction {
Left, Right
}
public Direction dir = Direction.Left;
public Rectangle rect = new Rectangle();
public int diffInitialPos = 0;
public final int RANGE = 100;
public final int ATTACK_DISTANCE = 100;
protected Animation animation = null;
float stateTime = 0;
float offSetX;
public boolean dying = false;
public boolean canMove = false;
public AtlasRegion actualFrame;
public boolean beingInvoked = false;
public Enemy(Animation animation) {
super(animation.getKeyFrame(0));
this.animation = animation;
this.actualFrame = ((AtlasRegion)animation.getKeyFrame(0));
}
public Rectangle getRect() {
this.rect.set(this.getX(), this.getY(),this.actualFrame.packedWidth, this.actualFrame.packedHeight);
return this.rect;
}
public void die(){
// sound and set to die
Assets.playSound("enemyDead");
this.state = State.Hurting;
this.stateTime = 0;
this.dying = true;
this.velocity.x = 0;
}
public void run() {
if (this.state != Enemy.State.Running)
{
Assets.playSound("enemyAttack");
if (this.dir == Direction.Left) {
this.diffInitialPos -= 2;
this.velocity.x = -this.ATTACK_VELOCITY;
}
else {
this.diffInitialPos += 2;
this.velocity.x = this.ATTACK_VELOCITY;
}
this.state = Enemy.State.Running;
this.stateTime = 0;
this.running = true;
}
}
public void walk() {
if (this.dir == Direction.Left) {
this.diffInitialPos -= 1;
this.velocity.x = -this.VELOCITY;
}
else {
this.diffInitialPos += 1;
this.velocity.x = this.VELOCITY;<|fim▁hole|> }
@Override
public void act(float delta) {
((TextureRegionDrawable)this.getDrawable()).setRegion(this.animation.getKeyFrame(this.stateTime+=delta, true));
super.act(delta);
}
}<|fim▁end|>
|
}
this.state = Enemy.State.Walking;
|
<|file_name|>test_consolor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
consolor
Copyright (c) 2013-2014, Friedrich Paetzke ([email protected])
All rights reserved.
"""
from __future__ import print_function
from consolor import BgColor, Color, get_line
try:
from unittest.mock import call, patch
except ImportError:
from mock import call, patch
def mockable_print(*args, **kwargs):
print(*args, **kwargs)
def test_print_bold():
result = get_line('123 bold', bold=True)
expected = '\x1b[1m123 bold\x1b[0m'
assert result == expected
def test_print_underline():
result = get_line('123 underline', underline=True)
expected = '\x1b[4m123 underline\x1b[0m'
assert result == expected
def test_get_bgcolor():
result = get_line('123 green bg', bgcolor=BgColor.Green)
expected = '\x1b[42;1m123 green bg\x1b[0m'
assert result == expected
def test_get_color():
result = get_line('123 light green', color=Color.LightGreen)
expected = '\x1b[1;32m123 light green\x1b[0m'
assert result == expected
def test_update_line():
for i in reversed(range(101)):
line = get_line('123%d' % i, update_line=True)
expected = '\x1b[2K\r%s%d\x1b[0m' % ('123', i)
assert line == expected
@patch('tests.test_consolor.mockable_print')
def test_print_color(mocked_print):
mockable_print(Color.Red, 'Red')
mockable_print('Red two')
mockable_print(Color.Reset, end='')
mockable_print('Not Red')
mocked_print.assert_has_calls([call('\x1b[0;31m', 'Red'),
call('Red two'),
call('\x1b[0m', end=''),
call('Not Red')])
@patch('tests.test_consolor.mockable_print')
def test_print_concat_color(mocked_print):
mockable_print(Color.Red, 'Red')
mockable_print('Red two')
mockable_print(Color.Blue, 'Blue')
mockable_print(Color.Reset, end='')
mockable_print('Not Blue')
mocked_print.assert_has_calls([call('\x1b[0;31m', 'Red'),
call('Red two'),
call('\x1b[0;34m', 'Blue'),
call('\x1b[0m', end=''),
call('Not Blue')])
@patch('tests.test_consolor.mockable_print')
def test_print_bgcolor(mocked_print):
mockable_print(BgColor.Red, 'Red')
mockable_print('Red two', BgColor.Reset)
mockable_print('None')
mocked_print.assert_has_calls([call('\x1b[41;1m', 'Red'),
call('Red two', '\x1b[0m'),
call('None')])<|fim▁hole|> mockable_print(BgColor.Red, 'Red')
mockable_print('Red two')
mockable_print(BgColor.Cyan, 'None')
mockable_print(BgColor.Reset)
mocked_print.assert_has_calls([call('\x1b[41;1m', 'Red'),
call('Red two'),
call('\x1b[46;1m', 'None'),
call('\x1b[0m')])
def test_color_and_bgcolor():
result = get_line('1', bgcolor=BgColor.Green, color=Color.Red)
expected = '\x1b[0;31m\x1b[42;1m1\x1b[0m'
assert result == expected<|fim▁end|>
|
@patch('tests.test_consolor.mockable_print')
def test_print_concat_bgcolor(mocked_print):
|
<|file_name|>ser.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Serde Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use serde::{self, Serialize};
use error::{Error, ErrorCode};
use map::Map;
use number::Number;
use value::{Value, to_value};
impl Serialize for Value {
#[inline]
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ::serde::Serializer,
{
match *self {
Value::Null => serializer.serialize_unit(),
Value::Bool(b) => serializer.serialize_bool(b),
Value::Number(ref n) => n.serialize(serializer),
Value::String(ref s) => serializer.serialize_str(s),
Value::Array(ref v) => v.serialize(serializer),
Value::Object(ref m) => {
use serde::ser::SerializeMap;
let mut map = try!(serializer.serialize_map(Some(m.len())));
for (k, v) in m {
try!(map.serialize_key(k));
try!(map.serialize_value(v));
}
map.end()
}
}
}
}
pub struct Serializer;
impl serde::Serializer for Serializer {
type Ok = Value;
type Error = Error;
type SerializeSeq = SerializeVec;
type SerializeTuple = SerializeVec;
type SerializeTupleStruct = SerializeVec;
type SerializeTupleVariant = SerializeTupleVariant;
type SerializeMap = SerializeMap;
type SerializeStruct = SerializeMap;
type SerializeStructVariant = SerializeStructVariant;
#[inline]
fn serialize_bool(self, value: bool) -> Result<Value, Error> {
Ok(Value::Bool(value))
}
#[inline]
fn serialize_i8(self, value: i8) -> Result<Value, Error> {
self.serialize_i64(value as i64)
}
#[inline]
fn serialize_i16(self, value: i16) -> Result<Value, Error> {
self.serialize_i64(value as i64)
}
#[inline]
fn serialize_i32(self, value: i32) -> Result<Value, Error> {
self.serialize_i64(value as i64)
}
fn serialize_i64(self, value: i64) -> Result<Value, Error> {
Ok(Value::Number(value.into()))
}
#[inline]
fn serialize_u8(self, value: u8) -> Result<Value, Error> {
self.serialize_u64(value as u64)
}
#[inline]
fn serialize_u16(self, value: u16) -> Result<Value, Error> {
self.serialize_u64(value as u64)
}
#[inline]
fn serialize_u32(self, value: u32) -> Result<Value, Error> {
self.serialize_u64(value as u64)
}
#[inline]
fn serialize_u64(self, value: u64) -> Result<Value, Error> {
Ok(Value::Number(value.into()))
}
#[inline]
fn serialize_f32(self, value: f32) -> Result<Value, Error> {
self.serialize_f64(value as f64)
}
#[inline]
fn serialize_f64(self, value: f64) -> Result<Value, Error> {
Ok(Number::from_f64(value).map_or(Value::Null, Value::Number))
}
#[inline]
fn serialize_char(self, value: char) -> Result<Value, Error> {
let mut s = String::new();
s.push(value);
self.serialize_str(&s)
}
#[inline]
fn serialize_str(self, value: &str) -> Result<Value, Error> {
Ok(Value::String(value.to_owned()))
}
fn serialize_bytes(self, value: &[u8]) -> Result<Value, Error> {
let vec = value.iter().map(|&b| Value::Number(b.into())).collect();
Ok(Value::Array(vec))
}
#[inline]
fn serialize_unit(self) -> Result<Value, Error> {
Ok(Value::Null)
}
#[inline]
fn serialize_unit_struct(self, _name: &'static str) -> Result<Value, Error> {
self.serialize_unit()
}
#[inline]
fn serialize_unit_variant(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
) -> Result<Value, Error> {
self.serialize_str(variant)
}
#[inline]
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
value: &T,
) -> Result<Value, Error>
where
T: Serialize,
{
value.serialize(self)
}
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,<|fim▁hole|> ) -> Result<Value, Error>
where
T: Serialize,
{
let mut values = Map::new();
values.insert(String::from(variant), try!(to_value(&value)));
Ok(Value::Object(values))
}
#[inline]
fn serialize_none(self) -> Result<Value, Error> {
self.serialize_unit()
}
#[inline]
fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Value, Error>
where
T: Serialize,
{
value.serialize(self)
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Error> {
Ok(SerializeVec { vec: Vec::with_capacity(len.unwrap_or(0)) })
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Error> {
self.serialize_seq(Some(len))
}
fn serialize_tuple_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct, Error> {
self.serialize_seq(Some(len))
}
fn serialize_tuple_variant(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant, Error> {
Ok(
SerializeTupleVariant {
name: String::from(variant),
vec: Vec::with_capacity(len),
},
)
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Error> {
Ok(
SerializeMap {
map: Map::new(),
next_key: None,
},
)
}
fn serialize_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeStruct, Error> {
self.serialize_map(Some(len))
}
fn serialize_struct_variant(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Error> {
Ok(
SerializeStructVariant {
name: String::from(variant),
map: Map::new(),
},
)
}
}
#[doc(hidden)]
pub struct SerializeVec {
vec: Vec<Value>,
}
#[doc(hidden)]
pub struct SerializeTupleVariant {
name: String,
vec: Vec<Value>,
}
#[doc(hidden)]
pub struct SerializeMap {
map: Map<String, Value>,
next_key: Option<String>,
}
#[doc(hidden)]
pub struct SerializeStructVariant {
name: String,
map: Map<String, Value>,
}
impl serde::ser::SerializeSeq for SerializeVec {
type Ok = Value;
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
self.vec.push(try!(to_value(&value)));
Ok(())
}
fn end(self) -> Result<Value, Error> {
Ok(Value::Array(self.vec))
}
}
impl serde::ser::SerializeTuple for SerializeVec {
type Ok = Value;
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
serde::ser::SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<Value, Error> {
serde::ser::SerializeSeq::end(self)
}
}
impl serde::ser::SerializeTupleStruct for SerializeVec {
type Ok = Value;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
serde::ser::SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<Value, Error> {
serde::ser::SerializeSeq::end(self)
}
}
impl serde::ser::SerializeTupleVariant for SerializeTupleVariant {
type Ok = Value;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
self.vec.push(try!(to_value(&value)));
Ok(())
}
fn end(self) -> Result<Value, Error> {
let mut object = Map::new();
object.insert(self.name, Value::Array(self.vec));
Ok(Value::Object(object))
}
}
impl serde::ser::SerializeMap for SerializeMap {
type Ok = Value;
type Error = Error;
fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Error>
where
T: Serialize,
{
match try!(to_value(&key)) {
Value::String(s) => self.next_key = Some(s),
Value::Number(n) => {
if n.is_u64() || n.is_i64() {
self.next_key = Some(n.to_string())
} else {
return Err(Error::syntax(ErrorCode::KeyMustBeAString, 0, 0));
}
}
_ => return Err(Error::syntax(ErrorCode::KeyMustBeAString, 0, 0)),
};
Ok(())
}
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
where
T: Serialize,
{
let key = self.next_key.take();
// Panic because this indicates a bug in the program rather than an
// expected failure.
let key = key.expect("serialize_value called before serialize_key");
self.map.insert(key, try!(to_value(&value)));
Ok(())
}
fn end(self) -> Result<Value, Error> {
Ok(Value::Object(self.map))
}
}
impl serde::ser::SerializeStruct for SerializeMap {
type Ok = Value;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Error>
where
T: Serialize,
{
try!(serde::ser::SerializeMap::serialize_key(self, key));
serde::ser::SerializeMap::serialize_value(self, value)
}
fn end(self) -> Result<Value, Error> {
serde::ser::SerializeMap::end(self)
}
}
impl serde::ser::SerializeStructVariant for SerializeStructVariant {
type Ok = Value;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Error>
where
T: Serialize,
{
self.map
.insert(String::from(key), try!(to_value(&value)));
Ok(())
}
fn end(self) -> Result<Value, Error> {
let mut object = Map::new();
object.insert(self.name, Value::Object(self.map));
Ok(Value::Object(object))
}
}<|fim▁end|>
|
_variant_index: u32,
variant: &'static str,
value: &T,
|
<|file_name|>main-en.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
import "@/lang/locale/en"
import './main'
|
<|file_name|>CloseableDisposerRecord.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.<|fim▁hole|> * CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package com.sun.imageio.stream;
import java.io.Closeable;
import java.io.IOException;
import sun.java2d.DisposerRecord;
/**
* Convenience class that closes a given resource (e.g. RandomAccessFile),
* typically associated with an Image{Input,Output}Stream, prior to the
* stream being garbage collected.
*/
public class CloseableDisposerRecord implements DisposerRecord {
private Closeable closeable;
public CloseableDisposerRecord(Closeable closeable) {
this.closeable = closeable;
}
public synchronized void dispose() {
if (closeable != null) {
try {
closeable.close();
} catch (IOException e) {
} finally {
closeable = null;
}
}
}
}<|fim▁end|>
|
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
|
<|file_name|>sbox.rs<|end_file_name|><|fim▁begin|>use std::collections::BTreeMap;
lazy_static! {
pub static ref S_BOX: SBox = SBox::new();
}
pub struct SBox {
s_map_enc: BTreeMap<u8, u8>,
s_map_dec: BTreeMap<u8, u8>,
}
impl SBox {
fn new() -> Self {
let mut tmp_map_enc = BTreeMap::new();
tmp_map_enc.insert(0, 12);
tmp_map_enc.insert(1, 5);
tmp_map_enc.insert(2, 6);
tmp_map_enc.insert(3, 11);
tmp_map_enc.insert(4, 9);
tmp_map_enc.insert(5, 0);
tmp_map_enc.insert(6, 10);
tmp_map_enc.insert(7, 13);
tmp_map_enc.insert(8, 3);
tmp_map_enc.insert(9, 14);
tmp_map_enc.insert(10, 15);
tmp_map_enc.insert(11, 8);
tmp_map_enc.insert(12, 4);
tmp_map_enc.insert(13, 7);
tmp_map_enc.insert(14, 1);
tmp_map_enc.insert(15, 2);
let mut tmp_map_dec = BTreeMap::new();
tmp_map_dec.insert(0, 5);
tmp_map_dec.insert(1, 14);
tmp_map_dec.insert(2, 15);
tmp_map_dec.insert(3, 8);
tmp_map_dec.insert(4, 12);
tmp_map_dec.insert(5, 1);
tmp_map_dec.insert(6, 2);
tmp_map_dec.insert(7, 13);
tmp_map_dec.insert(8, 11);
tmp_map_dec.insert(9, 4);
tmp_map_dec.insert(10, 6);
tmp_map_dec.insert(11, 3);
tmp_map_dec.insert(12, 0);
tmp_map_dec.insert(13, 7);
tmp_map_dec.insert(14, 9);
tmp_map_dec.insert(15, 10);
SBox {
s_map_enc: tmp_map_enc,
s_map_dec: tmp_map_dec
}
}
pub fn apply_enc(&self, input: u8) -> u8 {
*self.s_map_enc.get(&input).expect("Logic error! Invalid S-Box input! (enc)")
}
pub fn apply_dec(&self, input: u8) -> u8 {
*self.s_map_dec.get(&input).expect("Logic error! Invalid S-Box input! (dec)")
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_that_sbox_gives_correct_outputs() {
assert_eq!(S_BOX.apply_enc(0), 12);
assert_eq!(S_BOX.apply_enc(1), 5);
assert_eq!(S_BOX.apply_enc(2), 6);
assert_eq!(S_BOX.apply_enc(3), 11);
assert_eq!(S_BOX.apply_enc(4), 9);
assert_eq!(S_BOX.apply_enc(5), 0);
assert_eq!(S_BOX.apply_enc(6), 10);
assert_eq!(S_BOX.apply_enc(7), 13);
assert_eq!(S_BOX.apply_enc(8), 3);
assert_eq!(S_BOX.apply_enc(9), 14);
assert_eq!(S_BOX.apply_enc(10), 15);
assert_eq!(S_BOX.apply_enc(11), 8);
assert_eq!(S_BOX.apply_enc(12), 4);
assert_eq!(S_BOX.apply_enc(13), 7);
assert_eq!(S_BOX.apply_enc(14), 1);
assert_eq!(S_BOX.apply_enc(15), 2);
}
#[test]
fn test_that_inverse_sbox_gives_correct_outputs() {
assert_eq!(S_BOX.apply_dec(0), 5);
assert_eq!(S_BOX.apply_dec(1), 14);
assert_eq!(S_BOX.apply_dec(2), 15);
assert_eq!(S_BOX.apply_dec(3), 8);
assert_eq!(S_BOX.apply_dec(4), 12);
assert_eq!(S_BOX.apply_dec(5), 1);
assert_eq!(S_BOX.apply_dec(6), 2);
assert_eq!(S_BOX.apply_dec(7), 13);
assert_eq!(S_BOX.apply_dec(8), 11);
assert_eq!(S_BOX.apply_dec(9), 4);
assert_eq!(S_BOX.apply_dec(10), 6);
assert_eq!(S_BOX.apply_dec(11), 3);
assert_eq!(S_BOX.apply_dec(12), 0);
assert_eq!(S_BOX.apply_dec(13), 7);
assert_eq!(S_BOX.apply_dec(14), 9);
assert_eq!(S_BOX.apply_dec(15), 10);
}
#[test]
#[should_panic]
fn test_that_invalid_input_panics() {
S_BOX.apply_enc(16);
}
#[test]<|fim▁hole|> }
}<|fim▁end|>
|
#[should_panic]
fn test_that_invalid_input_panics_inverse() {
S_BOX.apply_dec(42);
|
<|file_name|>continuationIndentForCallInStatementPart_after.py<|end_file_name|><|fim▁begin|>for item in really_long_name_of_the_function_with_a_lot_of_patams(
param1, param2, param3):<|fim▁hole|><|fim▁end|>
|
pass
|
<|file_name|>dual_variable_warm_start_slack_osqp_interface.cc<|end_file_name|><|fim▁begin|>/******************************************************************************
* Copyright 2018 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
/*
* @file
*/
#include "modules/planning/open_space/trajectory_smoother/dual_variable_warm_start_slack_osqp_interface.h"
#include <algorithm>
#include "cyber/common/log.h"
#include "modules/common/configs/vehicle_config_helper.h"
#include "modules/common/math/math_utils.h"
#include "modules/common/util/util.h"
#include "modules/planning/common/planning_gflags.h"
namespace apollo {
namespace planning {
DualVariableWarmStartSlackOSQPInterface::
DualVariableWarmStartSlackOSQPInterface(
size_t horizon, double ts, const Eigen::MatrixXd& ego,
const Eigen::MatrixXi& obstacles_edges_num, const size_t obstacles_num,
const Eigen::MatrixXd& obstacles_A, const Eigen::MatrixXd& obstacles_b,
const Eigen::MatrixXd& xWS,
const PlannerOpenSpaceConfig& planner_open_space_config)
: ts_(ts),
ego_(ego),
obstacles_edges_num_(obstacles_edges_num),
obstacles_A_(obstacles_A),
obstacles_b_(obstacles_b),
xWS_(xWS) {
ACHECK(horizon < std::numeric_limits<int>::max())
<< "Invalid cast on horizon in open space planner";
horizon_ = static_cast<int>(horizon);
ACHECK(obstacles_num < std::numeric_limits<int>::max())
<< "Invalid cast on obstacles_num in open space planner";
obstacles_num_ = static_cast<int>(obstacles_num);
w_ev_ = ego_(1, 0) + ego_(3, 0);
l_ev_ = ego_(0, 0) + ego_(2, 0);
g_ = {l_ev_ / 2, w_ev_ / 2, l_ev_ / 2, w_ev_ / 2};
offset_ = (ego_(0, 0) + ego_(2, 0)) / 2 - ego_(2, 0);
obstacles_edges_sum_ = obstacles_edges_num_.sum();
l_start_index_ = 0;
n_start_index_ = l_start_index_ + obstacles_edges_sum_ * (horizon_ + 1);
s_start_index_ = n_start_index_ + 4 * obstacles_num_ * (horizon_ + 1);
l_warm_up_ = Eigen::MatrixXd::Zero(obstacles_edges_sum_, horizon_ + 1);
n_warm_up_ = Eigen::MatrixXd::Zero(4 * obstacles_num_, horizon_ + 1);
slacks_ = Eigen::MatrixXd::Zero(obstacles_num_, horizon_ + 1);
// get_nlp_info
lambda_horizon_ = obstacles_edges_sum_ * (horizon_ + 1);
miu_horizon_ = obstacles_num_ * 4 * (horizon_ + 1);
slack_horizon_ = obstacles_num_ * (horizon_ + 1);
// number of variables
num_of_variables_ = lambda_horizon_ + miu_horizon_ + slack_horizon_;
// number of constraints
num_of_constraints_ = 3 * obstacles_num_ * (horizon_ + 1) + num_of_variables_;
min_safety_distance_ =
planner_open_space_config.dual_variable_warm_start_config()
.min_safety_distance();
check_mode_ =
planner_open_space_config.dual_variable_warm_start_config().debug_osqp();
beta_ = planner_open_space_config.dual_variable_warm_start_config().beta();
osqp_config_ =
planner_open_space_config.dual_variable_warm_start_config().osqp_config();
}
void DualVariableWarmStartSlackOSQPInterface::printMatrix(
const int r, const int c, const std::vector<c_float>& P_data,
const std::vector<c_int>& P_indices, const std::vector<c_int>& P_indptr) {
Eigen::MatrixXf tmp = Eigen::MatrixXf::Zero(r, c);
for (size_t i = 0; i < P_indptr.size() - 1; ++i) {
if (P_indptr[i] < 0 || P_indptr[i] >= static_cast<int>(P_indices.size())) {
continue;
}
for (auto idx = P_indptr[i]; idx < P_indptr[i + 1]; ++idx) {
int tmp_c = static_cast<int>(i);
int tmp_r = static_cast<int>(P_indices[idx]);
tmp(tmp_r, tmp_c) = static_cast<float>(P_data[idx]);
}
}
AINFO << "row number: " << r;
AINFO << "col number: " << c;
for (int i = 0; i < r; ++i) {
AINFO << "row number: " << i;
AINFO << tmp.row(i);
}
}
void DualVariableWarmStartSlackOSQPInterface::assembleA(
const int r, const int c, const std::vector<c_float>& P_data,
const std::vector<c_int>& P_indices, const std::vector<c_int>& P_indptr) {
constraint_A_ = Eigen::MatrixXf::Zero(r, c);
for (size_t i = 0; i < P_indptr.size() - 1; ++i) {
if (P_indptr[i] < 0 || P_indptr[i] >= static_cast<int>(P_indices.size())) {
continue;
}
for (auto idx = P_indptr[i]; idx < P_indptr[i + 1]; ++idx) {
int tmp_c = static_cast<int>(i);
int tmp_r = static_cast<int>(P_indices[idx]);
constraint_A_(tmp_r, tmp_c) = static_cast<float>(P_data[idx]);
}
}
}
bool DualVariableWarmStartSlackOSQPInterface::optimize() {
// int kNumParam = num_of_variables_;
// int kNumConst = num_of_constraints_;
bool succ = true;
// assemble P, quadratic term in objective
std::vector<c_float> P_data;
std::vector<c_int> P_indices;
std::vector<c_int> P_indptr;
assembleP(&P_data, &P_indices, &P_indptr);
if (check_mode_) {
AINFO << "print P_data in whole: ";
printMatrix(num_of_variables_, num_of_variables_, P_data, P_indices,
P_indptr);
}
// assemble q, linear term in objective, \sum{beta * slacks}
c_float q[num_of_variables_]; // NOLINT
for (int i = 0; i < num_of_variables_; ++i) {
q[i] = 0.0;
if (i >= s_start_index_) {
q[i] = beta_;
}
}
// assemble A, linear term in constraints
std::vector<c_float> A_data;
std::vector<c_int> A_indices;
std::vector<c_int> A_indptr;
assembleConstraint(&A_data, &A_indices, &A_indptr);
if (check_mode_) {
AINFO << "print A_data in whole: ";
printMatrix(num_of_constraints_, num_of_variables_, A_data, A_indices,
A_indptr);
assembleA(num_of_constraints_, num_of_variables_, A_data, A_indices,
A_indptr);
}
// assemble lb & ub, slack_variable <= 0
c_float lb[num_of_constraints_]; // NOLINT
c_float ub[num_of_constraints_]; // NOLINT
int slack_indx = num_of_constraints_ - slack_horizon_;
for (int i = 0; i < num_of_constraints_; ++i) {
lb[i] = 0.0;
if (i >= slack_indx) {
lb[i] = -2e19;
}
if (i < 2 * obstacles_num_ * (horizon_ + 1)) {
ub[i] = 0.0;
} else if (i < slack_indx) {
ub[i] = 2e19;
} else {
ub[i] = 0.0;
}
}
// Problem settings
OSQPSettings* settings =
reinterpret_cast<OSQPSettings*>(c_malloc(sizeof(OSQPSettings)));
// Define Solver settings as default
osqp_set_default_settings(settings);
settings->alpha = osqp_config_.alpha(); // Change alpha parameter
settings->eps_abs = osqp_config_.eps_abs();
settings->eps_rel = osqp_config_.eps_rel();
settings->max_iter = osqp_config_.max_iter();
settings->polish = osqp_config_.polish();
settings->verbose = osqp_config_.osqp_debug_log();
// Populate data
OSQPData* data = reinterpret_cast<OSQPData*>(c_malloc(sizeof(OSQPData)));
data->n = num_of_variables_;
data->m = num_of_constraints_;
data->P = csc_matrix(data->n, data->n, P_data.size(), P_data.data(),
P_indices.data(), P_indptr.data());
data->q = q;
data->A = csc_matrix(data->m, data->n, A_data.size(), A_data.data(),
A_indices.data(), A_indptr.data());
data->l = lb;
data->u = ub;
// Workspace
OSQPWorkspace* work = nullptr;
// osqp_setup(&work, data, settings);
work = osqp_setup(data, settings);
// Solve Problem
osqp_solve(work);
// check state
if (work->info->status_val != 1 && work->info->status_val != 2) {
AWARN << "OSQP dual warm up unsuccess, "
<< "return status: " << work->info->status;
succ = false;
}
// transfer to make lambda's norm under 1
std::vector<double> lambda_norm;
int l_index = l_start_index_;
for (int i = 0; i < horizon_ + 1; ++i) {
int edges_counter = 0;
for (int j = 0; j < obstacles_num_; ++j) {
int current_edges_num = obstacles_edges_num_(j, 0);
Eigen::MatrixXd Aj =
obstacles_A_.block(edges_counter, 0, current_edges_num, 2);
double tmp1 = 0;
double tmp2 = 0;
for (int k = 0; k < current_edges_num; ++k) {
tmp1 += Aj(k, 0) * work->solution->x[l_index + k];
tmp2 += Aj(k, 1) * work->solution->x[l_index + k];
}
// norm(A * lambda)
double tmp_norm = tmp1 * tmp1 + tmp2 * tmp2;
if (tmp_norm >= 1e-5) {
lambda_norm.push_back(0.75 / std::sqrt(tmp_norm));
} else {
lambda_norm.push_back(0.0);
}
edges_counter += current_edges_num;
l_index += current_edges_num;
}
}
// extract primal results
int variable_index = 0;
// 1. lagrange constraint l, [0, obstacles_edges_sum_ - 1] * [0,
// horizon_l
for (int i = 0; i < horizon_ + 1; ++i) {
int r_index = 0;
for (int j = 0; j < obstacles_num_; ++j) {
for (int k = 0; k < obstacles_edges_num_(j, 0); ++k) {
l_warm_up_(r_index, i) = lambda_norm[i * obstacles_num_ + j] *
work->solution->x[variable_index];
++variable_index;
++r_index;
}
}
}
// 2. lagrange constraint n, [0, 4*obstacles_num-1] * [0, horizon_]
for (int i = 0; i < horizon_ + 1; ++i) {
int r_index = 0;
for (int j = 0; j < obstacles_num_; ++j) {
for (int k = 0; k < 4; ++k) {
n_warm_up_(r_index, i) = lambda_norm[i * obstacles_num_ + j] *
work->solution->x[variable_index];
++r_index;
++variable_index;
}
}
}
// 3. slack variables
for (int i = 0; i < horizon_ + 1; ++i) {
for (int j = 0; j < obstacles_num_; ++j) {
slacks_(j, i) = lambda_norm[i * obstacles_num_ + j] *
work->solution->x[variable_index];
++variable_index;
}
}
succ = succ & (work->info->obj_val <= 1.0);
// Cleanup
osqp_cleanup(work);
c_free(data->A);
c_free(data->P);
c_free(data);
c_free(settings);
return succ;
}
void DualVariableWarmStartSlackOSQPInterface::checkSolution(
const Eigen::MatrixXd& l_warm_up, const Eigen::MatrixXd& n_warm_up) {
// TODO(Runxin): extend
}
void DualVariableWarmStartSlackOSQPInterface::assembleP(
std::vector<c_float>* P_data, std::vector<c_int>* P_indices,
std::vector<c_int>* P_indptr) {
// the objective function is norm(A' * lambda)
std::vector<c_float> P_tmp;
int edges_counter = 0;
for (int j = 0; j < obstacles_num_; ++j) {
int current_edges_num = obstacles_edges_num_(j, 0);
Eigen::MatrixXd Aj;
Aj = obstacles_A_.block(edges_counter, 0, current_edges_num, 2);
// Eigen::MatrixXd AAj(current_edges_num, current_edges_num);
Aj = Aj * Aj.transpose();
CHECK_EQ(current_edges_num, Aj.cols());
CHECK_EQ(current_edges_num, Aj.rows());
for (int c = 0; c < current_edges_num; ++c) {
for (int r = 0; r < current_edges_num; ++r) {
P_tmp.emplace_back(Aj(r, c));
}
}
// Update index
edges_counter += current_edges_num;
}
int l_index = l_start_index_;
int first_row_location = 0;
// the objective function is norm(A' * lambda)
for (int i = 0; i < horizon_ + 1; ++i) {
edges_counter = 0;
for (auto item : P_tmp) {
P_data->emplace_back(item);
}
// current assumption: stationary obstacles
for (int j = 0; j < obstacles_num_; ++j) {
int current_edges_num = obstacles_edges_num_(j, 0);
for (int c = 0; c < current_edges_num; ++c) {
P_indptr->emplace_back(first_row_location);
for (int r = 0; r < current_edges_num; ++r) {
P_indices->emplace_back(r + l_index);
}
first_row_location += current_edges_num;
}<|fim▁hole|> // Update index
edges_counter += current_edges_num;
l_index += current_edges_num;
}
}
CHECK_EQ(P_indptr->size(), static_cast<size_t>(lambda_horizon_));
for (int i = lambda_horizon_; i < num_of_variables_ + 1; ++i) {
P_indptr->emplace_back(first_row_location);
}
CHECK_EQ(P_data->size(), P_indices->size());
CHECK_EQ(P_indptr->size(), static_cast<size_t>(num_of_variables_) + 1);
}
void DualVariableWarmStartSlackOSQPInterface::assembleConstraint(
std::vector<c_float>* A_data, std::vector<c_int>* A_indices,
std::vector<c_int>* A_indptr) {
/*
* The constraint matrix is as the form,
* |R' * A', G', 0|, #: 2 * obstacles_num_ * (horizon_ + 1)
* |A * t - b, -g, I|, #: obstacles_num_ * (horizon_ + 1)
* |I, 0, 0|, #: num_of_lambda
* |0, I, 0|, #: num_of_miu
* |0, 0, I|, #: num_of_slack
*/
int r1_index = 0;
int r2_index = 2 * obstacles_num_ * (horizon_ + 1);
int r3_index = 3 * obstacles_num_ * (horizon_ + 1);
int r4_index = 3 * obstacles_num_ * (horizon_ + 1) + lambda_horizon_;
int r5_index = r4_index + miu_horizon_;
int first_row_location = 0;
// lambda variables
// lambda_horizon_ = obstacles_edges_sum_ * (horizon_ + 1);
for (int i = 0; i < horizon_ + 1; ++i) {
int edges_counter = 0;
Eigen::MatrixXd R(2, 2);
R << cos(xWS_(2, i)), sin(xWS_(2, i)), sin(xWS_(2, i)), cos(xWS_(2, i));
Eigen::MatrixXd t_trans(1, 2);
t_trans << (xWS_(0, i) + cos(xWS_(2, i)) * offset_),
(xWS_(1, i) + sin(xWS_(2, i)) * offset_);
// assume: stationary obstacles
for (int j = 0; j < obstacles_num_; ++j) {
int current_edges_num = obstacles_edges_num_(j, 0);
Eigen::MatrixXd Aj =
obstacles_A_.block(edges_counter, 0, current_edges_num, 2);
Eigen::MatrixXd bj =
obstacles_b_.block(edges_counter, 0, current_edges_num, 1);
Eigen::MatrixXd r1_block(2, current_edges_num);
r1_block = R * Aj.transpose();
Eigen::MatrixXd r2_block(1, current_edges_num);
r2_block = t_trans * Aj.transpose() - bj.transpose();
// insert into A matrix, col by col
for (int k = 0; k < current_edges_num; ++k) {
A_data->emplace_back(r1_block(0, k));
A_indices->emplace_back(r1_index);
A_data->emplace_back(r1_block(1, k));
A_indices->emplace_back(r1_index + 1);
A_data->emplace_back(r2_block(0, k));
A_indices->emplace_back(r2_index);
A_data->emplace_back(1.0);
A_indices->emplace_back(r3_index);
r3_index++;
A_indptr->emplace_back(first_row_location);
first_row_location += 4;
}
// Update index
edges_counter += current_edges_num;
r1_index += 2;
r2_index += 1;
}
}
// miu variables, miu_horizon_ = obstacles_num_ * 4 * (horizon_ + 1);
// G: ((1, 0, -1, 0), (0, 1, 0, -1))
// g: g_
r1_index = 0;
r2_index = 2 * obstacles_num_ * (horizon_ + 1);
for (int i = 0; i < horizon_ + 1; ++i) {
for (int j = 0; j < obstacles_num_; ++j) {
for (int k = 0; k < 4; ++k) {
// update G
if (k < 2) {
A_data->emplace_back(1.0);
} else {
A_data->emplace_back(-1.0);
}
A_indices->emplace_back(r1_index + k % 2);
// update g'
A_data->emplace_back(-g_[k]);
A_indices->emplace_back(r2_index);
// update I
A_data->emplace_back(1.0);
A_indices->emplace_back(r4_index);
r4_index++;
// update col index
A_indptr->emplace_back(first_row_location);
first_row_location += 3;
}
// update index
r1_index += 2;
r2_index += 1;
}
}
// slack variables
// slack_horizon_ = obstacles_edges_sum_ * (horizon_ + 1);
r2_index = 2 * obstacles_num_ * (horizon_ + 1);
for (int i = 0; i < horizon_ + 1; ++i) {
for (int j = 0; j < obstacles_num_; ++j) {
A_data->emplace_back(1.0);
A_indices->emplace_back(r2_index);
++r2_index;
A_data->emplace_back(1.0);
A_indices->emplace_back(r5_index);
++r5_index;
A_indptr->emplace_back(first_row_location);
first_row_location += 2;
}
}
A_indptr->emplace_back(first_row_location);
CHECK_EQ(A_data->size(), A_indices->size());
CHECK_EQ(A_indptr->size(), static_cast<size_t>(num_of_variables_) + 1);
}
void DualVariableWarmStartSlackOSQPInterface::get_optimization_results(
Eigen::MatrixXd* l_warm_up, Eigen::MatrixXd* n_warm_up,
Eigen::MatrixXd* s_warm_up) const {
*l_warm_up = l_warm_up_;
*n_warm_up = n_warm_up_;
*s_warm_up = slacks_;
// debug mode check slack values
double max_s = slacks_(0, 0);
double min_s = slacks_(0, 0);
for (int i = 0; i < horizon_ + 1; ++i) {
for (int j = 0; j < obstacles_num_; ++j) {
max_s = std::max(max_s, slacks_(j, i));
min_s = std::min(min_s, slacks_(j, i));
}
}
ADEBUG << "max_s: " << max_s;
ADEBUG << "min_s: " << min_s;
}
} // namespace planning
} // namespace apollo<|fim▁end|>
| |
<|file_name|>wif_test.go<|end_file_name|><|fim▁begin|>package enc
import (
"bytes"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"testing"
)<|fim▁hole|>
if testing.Short() {
t.Skip("skipping test in short mode.")
}
keys1, err := ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
if err != nil {
t.Error(err.Error())
}
wif, err := EncodeWif(keys1)
if err != nil {
t.Error(err.Error())
}
keys2, err := DecodeWif(wif)
if err != nil {
t.Error(err.Error())
}
if bytes.Compare(keys1.D.Bytes(), keys2.D.Bytes()) != 0 {
t.Error("Private keys are different. Expected %x, got %x\n", keys1.D.Bytes(), keys2.D.Bytes())
}
if bytes.Compare(keys1.PublicKey.X.Bytes(), keys2.PublicKey.X.Bytes()) != 0 {
t.Error("Public point X are different. Expected %x, got %x\n", keys1.PublicKey.X.Bytes(), keys2.PublicKey.X.Bytes())
}
if bytes.Compare(keys1.PublicKey.Y.Bytes(), keys2.PublicKey.Y.Bytes()) != 0 {
t.Error("Public point Y are different. Expected %x, got %x\n", keys1.PublicKey.Y.Bytes(), keys2.PublicKey.Y.Bytes())
}
if !keys2.PublicKey.Curve.IsOnCurve(keys2.PublicKey.X, keys2.PublicKey.Y) {
t.Error("Public point is not on curve\n")
}
ok, err := ValidateWif(wif)
if err != nil {
t.Error(err.Error())
}
if !ok {
t.Error("Invalid checksum")
}
}<|fim▁end|>
|
func TestWIF(t *testing.T) {
|
<|file_name|>liveness-loop-break.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test() {
let v;
loop {
v = 3;
break;
}<|fim▁hole|>
pub fn main() {
test();
}<|fim▁end|>
|
debug!("%d", v);
}
|
<|file_name|>test_matcher.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
from matcher import Matcher
import util as util
import unittest
class TestMatcher(unittest.TestCase):
@classmethod
def setUpClass(cls):
# This is required so that readScraper() can parse the XML instruction files
util.RCBHOME = os.path.join(os.path.dirname(__file__), '..', '..')
# Test matching against a result set
def test_getBestResultsWithRomanNumerals(self):
results = [{'SearchKey': ['Tekken 2']}, {'SearchKey': ['Tekken 3']}, {'SearchKey': ['Tekken IV']}]
gamename = 'Tekken II'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'Tekken 2')
def test_getBestResultsWithApostropheAndYear(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]
gamename = 'FIFA \'98'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == 'FIFA 98',
"Expected to match title (was {0})".format(x.get('SearchKey')[0]))
def test_getBestResultsMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = u'スーパー競輪'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertTrue(x.get('SearchKey')[0] == u'スーパー競輪', "Expected matching unicode strings to match")
def test_getBestResultsNonMatchingWithUnicode(self):
results = [{'SearchKey': [u'スーパー競輪']}]
gamename = 'Super Test Game'
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertIsNone(x, "Expected non-matching strings to not match, including unicode")
def test_getBestResultsWithBrackets(self):
results = [{'SearchKey': ['FIFA 98']}, {'SearchKey': ['FIFA 97']}, {'SearchKey': ['FIFA 2001']}]<|fim▁hole|>
m = Matcher()
x = m.getBestResults(results, gamename)
self.assertEquals(x.get('SearchKey')[0], 'FIFA 98')
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
gamename = 'FIFA \'98 (1998) [Electronic Arts]'
|
<|file_name|>compress.js<|end_file_name|><|fim▁begin|>if(typeof(Control)=='undefined')
Control={};
Control.TextArea=Class.create();
Object.extend(Control.TextArea.prototype,{
onChangeTimeoutLength:500,
element:false,
onChangeTimeout:false,
initialize:function(textarea){
this.element=$(textarea);
$(this.element).observe('keyup',this.doOnChange.bindAsEventListener(this));
$(this.element).observe('paste',this.doOnChange.bindAsEventListener(this));
$(this.element).observe('input',this.doOnChange.bindAsEventListener(this));
},
doOnChange:function(event){
if(this.onChangeTimeout)
window.clearTimeout(this.onChangeTimeout);
this.onChangeTimeout=window.setTimeout(function(){
if(this.notify)
this.notify('change',this.getValue());
}.bind(this),this.onChangeTimeoutLength);
},
getValue:function(){
return this.element.value;
},
getSelection:function(){
if(!!document.selection)
return document.selection.createRange().text;
else if(!!this.element.setSelectionRange)
return this.element.value.substring(this.element.selectionStart,this.element.selectionEnd);
else
return false;
},
replaceSelection:function(text){
var scrollTop=this.element.scrollTop;
if(!!document.selection){
this.element.focus();
var old=document.selection.createRange().text;
var range=document.selection.createRange();
range.text=text;
range-=old.length-text.length;
}else if(!!this.element.setSelectionRange){
var selection_start=this.element.selectionStart;
this.element.value=this.element.value.substring(0,selection_start)+text+this.element.value.substring(this.element.selectionEnd);
this.element.setSelectionRange(selection_start+text.length,selection_start+text.length);
}
this.doOnChange();
this.element.focus();
this.element.scrollTop=scrollTop;
},
wrapSelection:function(before,after){
this.replaceSelection(before+this.getSelection()+after);
},
insertBeforeSelection:function(text){
this.replaceSelection(text+this.getSelection());
},
insertAfterSelection:function(text){
this.replaceSelection(this.getSelection()+text);
},
injectEachSelectedLine:function(callback,before,after){
this.replaceSelection((before||'')+$A(this.getSelection().split("\n")).inject([],callback).join("\n")+(after||''));
},
insertBeforeEachSelectedLine:function(text,before,after){
this.injectEachSelectedLine(function(lines,line){
lines.push(text+line);
return lines;
},before,after);
}
});
if(typeof(Object.Event)!='undefined')
Object.Event.extend(Control.TextArea);Control.TextArea.BBCode=Class.create();
Object.extend(Control.TextArea.BBCode.prototype,{
textarea:false,
tooltip:false,
toolbar:false,
emotions:false,
wrapper:false,
controllers:false,
initialize:function(textarea){
this.textarea=new Control.TextArea(textarea);
this._initLayout();
this._initEmotions();
this._initToolbar();
},
hide:function(){
this.wrapper.parentNode.appendChild(this.textarea.element.remove());
this.wrapper.hide();
},
show:function(){
this.controllers.appendChild(this.textarea.element.remove());
this.wrapper.show();
},
_initLayout:function(){
this.wrapper=$(document.createElement('div'));
this.wrapper.id="editor_wrapper";
this.wrapper.className="clearfix";
this.textarea.element.parentNode.insertBefore(this.wrapper,this.textarea.element);
this.emotions=$(document.createElement('div'));
this.emotions.id="bbcode_emotions";
this.emotions.innerHTML="<h5>表情图标</h5>";
this.wrapper.appendChild(this.emotions);
this.controllers=$(document.createElement('div'));
this.controllers.id="bbcode_controllers";
this.wrapper.appendChild(this.controllers);
this.toolbar=$(document.createElement('div'));
this.toolbar.id="bbcode_toolbar";
this.controllers.appendChild(this.toolbar);
this.tooltip=$(document.createElement('div'));
this.tooltip.id="bbcode_tooltip";
this.tooltip.innerHTML="提示:选择您需要装饰的文字, 按上列按钮即可添加上相应的标签";
this.controllers.appendChild(this.tooltip);<|fim▁hole|>},
_initEmotions:function(){
this._addEmotion("biggrin",function(){this.insertAfterSelection(" :D ");});
this._addEmotion("smile",function(){this.insertAfterSelection(" :) ");});
this._addEmotion("sad",function(){this.insertAfterSelection(" :( ");});
this._addEmotion("surprised",function(){this.insertAfterSelection(" :o ");});
this._addEmotion("eek",function(){this.insertAfterSelection(" :shock: ");});
this._addEmotion("confused",function(){this.insertAfterSelection(" :? ");});
this._addEmotion("cool",function(){this.insertAfterSelection(" 8) ");});
this._addEmotion("lol",function(){this.insertAfterSelection(" :lol: ");});
this._addEmotion("mad",function(){this.insertAfterSelection(" :x ");});
this._addEmotion("razz",function(){this.insertAfterSelection(" :P ");});
this._addEmotion("redface",function(){this.insertAfterSelection(" :oops: ");});
this._addEmotion("cry",function(){this.insertAfterSelection(" :cry: ");});
this._addEmotion("evil",function(){this.insertAfterSelection(" :evil: ");});
this._addEmotion("twisted",function(){this.insertAfterSelection(" :twisted: ");});
this._addEmotion("rolleyes",function(){this.insertAfterSelection(" :roll: ");});
this._addEmotion("wink",function(){this.insertAfterSelection(" :wink: ");});
this._addEmotion("exclaim",function(){this.insertAfterSelection(" :!: ");});
this._addEmotion("question",function(){this.insertAfterSelection(" :?: ");});
this._addEmotion("idea",function(){this.insertAfterSelection(" :idea: ");});
this._addEmotion("arrow",function(){this.insertAfterSelection(" :arrow: ");});
},
_addEmotion:function(icon,callback){
var img=$(document.createElement('img'));
img.src="http://www.javaeye.com/images/smiles/icon_"+icon+".gif";
img.observe('click',callback.bindAsEventListener(this.textarea));
this.emotions.appendChild(img);
},
_initToolbar:function(){
this._addButton("B",function(){this.wrapSelection('[b]','[/b]');},function(){this.innerHTML='粗体: [b]文字[/b] (alt+b)';},{id:'button_bold'});
this._addButton("I",function(){this.wrapSelection('[i]','[/i]');},function(){this.innerHTML='斜体: [i]文字[/i] (alt+i)';},{id:'button_italic'});
this._addButton("U",function(){this.wrapSelection('[u]','[/u]');},function(){this.innerHTML='下划线: [u]文字[/u] (alt+u)';},{id:'button_underline'});
this._addButton("Quote",function(){this.wrapSelection('[quote]','[/quote]');},function(){this.innerHTML='引用文字: [quote]文字[/quote] 或者 [quote="javaeye"]文字[/quote] (alt+q)';});
this._addButton("Code",function(){this.wrapSelection('[code="java"]','[/code]');},function(){this.innerHTML='代码: [code="ruby"]...[/code] (支持java, ruby, js, xml, html, php, python, c, c++, c#, sql)';});
this._addButton("List",function(){this.insertBeforeEachSelectedLine('[*]','[list]\n','\n[/list]')},function(){this.innerHTML='列表: [list] [*]文字 [*]文字 [/list] 或者 顺序列表: [list=1] [*]文字 [*]文字 [/list]';});
this._addButton("Img",function(){this.wrapSelection('[img]','[/img]');},function(){this.innerHTML='插入图像: [img]http://image_url[/img] (alt+p)';});
this._addButton("URL",function(){this.wrapSelection('[url]','[/url]');},function(){this.innerHTML='插入URL: [url]http://url[/url] 或 [url=http://url]URL文字[/url] (alt+w)';});
this._addButton("Flash",function(){this.wrapSelection('[flash=200,200]','[/flash]');},function(){this.innerHTML='插入Flash: [flash=宽,高]http://your_flash.swf[/flash]';});
this._addButton("Table",function(){this.injectEachSelectedLine(function(lines,line){lines.push("|"+line+"|");return lines;},'[table]\n','\n[/table]');},function(){this.innerHTML='插入表格: [table]用换行和|来编辑格子[/table]';});
var color_select=[
"<br />字体颜色: ",
"<select id='select_color'>",
"<option value='black' style='color: black;'>标准</option>",
"<option value='darkred' style='color: darkred;'>深红</option>",
"<option value='red' style='color: red;'>红色</option>",
"<option value='orange' style='color: orange;'>橙色</option>",
"<option value='brown' style='color: brown;'>棕色</option>",
"<option value='yellow' style='color: yellow;'>黄色</option>",
"<option value='green' style='color: green;'>绿色</option>",
"<option value='olive' style='color: olive;'>橄榄</option>",
"<option value='cyan' style='color: cyan;'>青色</option>",
"<option value='blue' style='color: blue;'>蓝色</option>",
"<option value='darkblue' style='color: darkblue;'>深蓝</option>",
"<option value='indigo' style='color: indigo;'>靛蓝</option>",
"<option value='violet' style='color: violet;'>紫色</option>",
"<option value='gray' style='color: gray;'>灰色</option>",
"<option value='white' style='color: white;'>白色</option>",
"<option value='black' style='color: black;'>黑色</option>",
"</select>"
];
this.toolbar.insert(color_select.join(""));
$('select_color').observe('change',this._change_color.bindAsEventListener(this.textarea));
$('select_color').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体颜色: [color=red]文字[/color] 提示:您可以使用 color=#FF0000";});
var font_select=[
" 字体大小: ",
"<select id='select_font'>",
"<option value='0'>标准</option>",
"<option value='xx-small'>1 (xx-small)</option>",
"<option value='x-small'>2 (x-small)</option>",
"<option value='small'>3 (small)</option>",
"<option value='medium'>4 (medium)</option>",
"<option value='large'>5 (large)</option>",
"<option value='x-large'>6 (x-large)</option>",
"<option value='xx-large'>7 (xx-large)</option>",
"</select>"
];
this.toolbar.insert(font_select.join(""));
$('select_font').observe('change',this._change_font.bindAsEventListener(this.textarea));
$('select_font').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体大小: [size=x-small]小字体文字[/size]";});
var align_select=[
" 对齐: ",
"<select id='select_align'>",
"<option value='0'>标准</option>",
"<option value='left'>居左</option>",
"<option value='center'>居中</option>",
"<option value='right'>居右</option>",
"</select>"
]
this.toolbar.insert(align_select.join(""));
$('select_align').observe('change',this._change_align.bindAsEventListener(this.textarea));
$('select_align').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="对齐: [align=center]文字[/align]";});
},
_addButton:function(value,callback,tooltip,attrs){
var input=$(document.createElement('input'));
input.type="button";
input.value=value;
input.observe('click',callback.bindAsEventListener(this.textarea));
input.observe('mouseover',tooltip.bindAsEventListener(this.tooltip));
Object.extend(input,attrs||{});
this.toolbar.appendChild(input);
},
_change_color:function(){
this.wrapSelection('[color='+$F('select_color')+']','[/color]');
$('select_color').selectedIndex=0;
},
_change_font:function(){
this.wrapSelection('[size='+$F('select_font')+']','[/size]');
$('select_font').selectedIndex=0;
},
_change_align:function(){
this.wrapSelection('[align='+$F('select_align')+']','[/align]');
$('select_align').selectedIndex=0;
}
});if(typeof(tinyMCE)!='undefined'){
tinyMCE.init({
plugins:"javaeye,media,table,emotions,contextmenu,fullscreen,inlinepopups",
mode:"none",
language:"zh",
theme:"advanced",
theme_advanced_buttons1:"formatselect,fontselect,fontsizeselect,separator,forecolor,backcolor,separator,bold,italic,underline,strikethrough,separator,bullist,numlist",
theme_advanced_buttons2:"undo,redo,cut,copy,paste,separator,justifyleft,justifycenter,justifyright,separator,outdent,indent,separator,link,unlink,image,media,emotions,table,separator,quote,code,separator,fullscreen",
theme_advanced_buttons3:"",
theme_advanced_toolbar_location:"top",
theme_advanced_toolbar_align:"left",
theme_advanced_fonts:"宋体=宋体;黑体=黑体;仿宋=仿宋;楷体=楷体;隶书=隶书;幼圆=幼圆;Arial=arial,helvetica,sans-serif;Comic Sans MS=comic sans ms,sans-serif;Courier New=courier new,courier;Tahoma=tahoma,arial,helvetica,sans-serif;Times New Roman=times new roman,times;Verdana=verdana,geneva;Webdings=webdings;Wingdings=wingdings,zapf dingbats",
convert_fonts_to_spans:true,
remove_trailing_nbsp:true,
remove_linebreaks:false,
width:"100%",
extended_valid_elements:"pre[name|class],object[classid|codebase|width|height|align],param[name|value],embed[quality|type|pluginspage|width|height|src|align|wmode]",
relative_urls:false,
content_css:"/javascripts/tinymce/plugins/javaeye/css/content.css",
save_callback:"removeBRInPre"
});
}
function removeBRInPre(element_id,html,body){
return html.replace(/<pre([^>]*)>((?:.|\n)*?)<\/pre>/gi,function(a,b,c){
c=c.replace(/<br\s*\/?>\n*/gi,'\n');
return'<pre'+b+'>'+c+'</pre>';
});
}
Control.TextArea.Editor=Class.create();
Object.extend(Control.TextArea.Editor.prototype,{
bbcode_editor:false,
rich_editor:false,
mode:false,
in_preview:false,
initialize:function(textarea,mode,autosave){
this.editor_bbcode_flag=$("editor_bbcode_flag");
this.textarea=textarea;
this.switchMode(mode);
if(autosave)this._initAutosave();
},
switchMode:function(mode,convert){
if(this.in_preview&&this.mode==mode){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
return;
}
if(this.mode==mode)return;
if(convert){
var old_text=this.getValue();
if(old_text!=""){
if(!confirm("切换编辑器模式可能导致格式和内容丢失,你确定吗?"))return;
$('editor_switch_spinner').show();
}
}
this.mode=mode;
if($("editor_switch")){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
}
if(this.mode=="rich"){
this.editor_bbcode_flag.value="false";
if(this.bbcode_editor)this.bbcode_editor.hide();
this.rich_editor=true;
tinyMCE.execCommand('mceAddControl',false,this.textarea);
}else{
this.editor_bbcode_flag.value="true";
if(this.rich_editor)tinyMCE.execCommand('mceRemoveControl',false,this.textarea);
this.bbcode_editor?this.bbcode_editor.show():this.bbcode_editor=new Control.TextArea.BBCode(this.textarea);
}
if(convert&&old_text!=""){
new Ajax.Request(this.mode=="rich"?'/editor/bbcode2html':'/editor/html2bbcode',{
method:'post',
parameters:{text:old_text},
asynchronous:true,
onSuccess:function(transport){this.setValue(transport.responseText);$('editor_switch_spinner').hide();}.bind(this)
});
}
},
getValue:function(){
return this.mode=="bbcode"?this.bbcode_editor.textarea.element.value:tinyMCE.activeEditor.getContent();
},
setValue:function(value){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.element.value=value;
}else{
tinyMCE.get(this.textarea).setContent(value);
}
},
preview:function(){
this.in_preview=true;
$('editor_switch_spinner').show();
$("editor_preview").show();
$("editor_main").hide();
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").addClassName("activetab");
new Ajax.Updater("editor_preview","/editor/preview",{
parameters:{text:this.getValue(),mode:this.mode},
evalScripts:true,
onSuccess:function(){$('editor_switch_spinner').hide();}
});
},
insertImage:function(url){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.insertAfterSelection("\n[img]"+url+"[/img]\n");
}else{
tinyMCE.execCommand("mceInsertContent", false, "<br/><img src='"+url+"'/><br/> ");
}
},
_initAutosave:function(){
this.autosave_url=window.location.href;
new Ajax.Request('/editor/check_autosave',{
method:'post',
parameters:{url:this.autosave_url},
asynchronous:true,
onSuccess:this._loadAutosave.bind(this)
});
setInterval(this._autosave.bind(this),90*1000);
},
_loadAutosave:function(transport){
var text=transport.responseText;
if(text!="nil"){
eval("this.auto_save = "+text);
$('editor_auto_save_update').update('<span style="color:red">您有一份自动保存于'+this.auto_save.updated_at+'的草稿,<a href="#" onclick=\'editor._setAutosave();return false;\'>恢复</a>还是<a href="#" onclick=\'editor._discardAutosave();return false;\'>丢弃</a>呢?</span>');
}
},
_setAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
this.auto_save.bbcode?this.switchMode("bbcode"):this.switchMode("rich");
this.setValue(this.auto_save.body);
},
_discardAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
},
_autosave:function(){
var body=this.getValue();
if(body.length<100)return;
new Ajax.Request('/editor/autosave',{
method:'post',
parameters:{
url:this.autosave_url,
body:body,
bbcode:this.mode=="bbcode"
},
asynchronous:true,
onSuccess:function(transport){
$('editor_auto_save_id').value=transport.responseText;
$('editor_auto_save_update').update('<span style="color:red">JavaEye编辑器帮您自动保存草稿于:'+new Date().toLocaleString()+'</span>');
}
});
}
});<|fim▁end|>
|
this.controllers.appendChild(this.textarea.element.remove());
|
<|file_name|>crack.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import rainbow<|fim▁hole|>import string
import time
import random
"""SHA-256 hash function
Precondition: Input plaintext as string
Postcondition: Returns hash as string
"""
def sha256(plaintext):
return hashlib.sha256(bytes(plaintext, 'utf-8')).hexdigest()
"""Returns a reduction function which generates an n-digit lowercase password from a hash
"""
def reduce_lower(n):
"""Reduction function
Precondition: hash is H(previousPlaintext)
Postcondition: returns randomly distributed n-digit lowercase plaintext password
"""
def result(hash, col):
plaintextKey = (int(hash[:9], 16) ^ col) % (26 ** n)
plaintext = ""
for _ in range(n):
plaintext += string.ascii_lowercase[plaintextKey % 26]
plaintextKey //= 26
return plaintext
return result
"""Returns a function which generates a random n-digit lowercase password
"""
def gen_lower(n):
def result():
password = ""
for _ in range(n):
password += random.choice(string.ascii_lowercase)
return password
return result
"""Precondition: Input a function which generates a random password, or input no arguments to generate a random password
Postcondition: Cracks H(password) and prints elapsed time
"""
def test(table, hash_function, gen_password_function, password=""):
if password == "":
password = gen_password_function()
print("Cracking password: {0}\nH(password): {1}".format(password, hash_function(password)))
cracked = table.crack(hash_function(password))
if cracked:
print("Success! Password: {0}".format(cracked))
return True
else:
print("Unsuccessful :(")
return False
# Tests random passwords multiple times and prints success rate and average crack time.
def bulk_test(table, hash_function, gen_password_function, numTests):
start = time.time()
numSuccess = 0
for i in range(numTests):
print("\nTest {0} of {1}".format(i + 1, numTests))
numSuccess += test(table, hash_function, gen_password_function)
print("""\n{0} out of {1} random hashes were successful!\n
Average time per hash (including failures): {2} secs.""" \
.format(numSuccess, numTests, (time.time() - start) / numTests))
table = rainbow.RainbowTable(sha256, reduce_lower(4), gen_lower(4))<|fim▁end|>
|
import hashlib
|
<|file_name|>test_20_actor.py<|end_file_name|><|fim▁begin|>from vertebra.actor import actor
class test_00_actor:
def test_00_instantiate(self):<|fim▁hole|><|fim▁end|>
|
"""actor: can instantiate a base actor"""
a = actor()
assert isinstance(a,actor), "instantiated actor is actually an actor"
|
<|file_name|>algo.py<|end_file_name|><|fim▁begin|>import numpy as np
import scipy.linalg as la
def calculate_vertex_normals(verts, tris):
v_array = np.array(verts)
tri_array = np.array(tris, dtype=int)
tri_pts = v_array[tri_array]
n = np.cross( tri_pts[:,1] - tri_pts[:,0],
tri_pts[:,2] - tri_pts[:,0])
v_normals = np.zeros(v_array.shape)<|fim▁hole|>
nrms = np.sqrt(v_normals[:,0]**2 + v_normals[:,1]**2 + v_normals[:,2]**2)
v_normals = v_normals / nrms.reshape((-1,1))
return v_normals<|fim▁end|>
|
for i in range(tri_array.shape[0]):
for j in tris[i]:
v_normals[j,:] += n[i,:]
|
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>extern crate serde;
extern crate serde_json;
use std::io;
use std::io::Read;
use std::fs::File;
use std::error::Error;
include!(concat!(env!("OUT_DIR"), "/config.rs"));
#[derive(Debug)]
pub struct Config {
pub file_path: String,
pub auth: String,
}
impl Config {
pub fn load_from_file(path: &str) -> Result<Config, io::Error> {
// TEST SECTION
let point = ConfigFile { file_path: "data".to_string(), auth_file: "auth_file".to_string() };
let serialized = serde_json::to_string(&point).unwrap();
println!("{}", serialized);
let deserialized: ConfigFile = serde_json::from_str(&serialized).unwrap();
println!("{:?}", deserialized);
// END TEST SECTION
let mut file = try!(File::open(path));
let mut data = String::new();<|fim▁hole|> io::Error::new(io::ErrorKind::Other, err.description())
})
);
let mut auth_file = try!(File::open(deserialized.auth_file));
let mut auth_code = String::new();
try!(auth_file.read_to_string(& mut auth_code));
Ok(Config {
file_path: deserialized.file_path,
auth: auth_code,
})
}
}<|fim▁end|>
|
try!(file.read_to_string(& mut data));
let deserialized: ConfigFile = try!(
serde_json::from_str(&data).map_err(|err| {
|
<|file_name|>AuthServiceImpl.java<|end_file_name|><|fim▁begin|>package com.lyubenblagoev.postfixrest.service;
import com.lyubenblagoev.postfixrest.entity.User;
import com.lyubenblagoev.postfixrest.security.JwtTokenProvider;
import com.lyubenblagoev.postfixrest.security.RefreshTokenProvider;
import com.lyubenblagoev.postfixrest.security.UserPrincipal;
import com.lyubenblagoev.postfixrest.service.model.AuthResponse;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityNotFoundException;
import java.util.Optional;
@Service
@Transactional(readOnly = true)
public class AuthServiceImpl implements AuthService {
private final JwtTokenProvider jwtTokenProvider;
private final RefreshTokenProvider refreshTokenProvider;
private final UserService userService;
public AuthServiceImpl(JwtTokenProvider jwtTokenProvider,
RefreshTokenProvider refreshTokenProvider,
UserService userService) {
this.jwtTokenProvider = jwtTokenProvider;
this.refreshTokenProvider = refreshTokenProvider;
this.userService = userService;<|fim▁hole|> Optional<User> userOptional = userService.findByEmail(email);
if (userOptional.isEmpty()) {
throw new EntityNotFoundException("Failed to find user with email " + email);
}
UserPrincipal userPrincipal = new UserPrincipal(userOptional.get());
String token = jwtTokenProvider.createToken(userPrincipal.getUsername(), userPrincipal.getAuthorities());
RefreshTokenProvider.RefreshToken refreshToken = refreshTokenProvider.createToken();
return new AuthResponse(token, refreshToken.getToken(), refreshToken.getExpirationDate());
}
}<|fim▁end|>
|
}
@Override
public AuthResponse createTokens(String email) {
|
<|file_name|>core_depthplot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env pythonw
#from __future__ import print_function
import sys
import wx
import os
import matplotlib
if matplotlib.get_backend() != "WXAgg":
matplotlib.use("WXAgg")
import matplotlib.pyplot as plt
from pmagpy import pmagplotlib
import pmagpy.command_line_extractor as extractor
import pmagpy.ipmag as ipmag
import dialogs.pmag_widgets as pw
import dialogs.pmag_menu_dialogs as pmag_menu_dialogs
def main():
"""
NAME
core_depthplot.py
DESCRIPTION
plots various measurements versus core_depth or age. plots data flagged as 'FS-SS-C' as discrete samples.
SYNTAX
core_depthplot.py [command line options]
# or, for Anaconda users:
core_depthplot_anaconda [command line options]
OPTIONS
-h prints help message and quits
-f FILE: specify input measurments format file
-fsum FILE: specify input LIMS database (IODP) core summary csv file
-fwig FILE: specify input depth,wiggle to plot, in magic format with sample_core_depth key for depth
-fsa FILE: specify input er_samples format file from magic for depth
-fa FILE: specify input ages format file from magic for age
NB: must have either -fsa OR -fa (not both)
-fsp FILE sym size: specify input zeq_specimen format file from magic, sym and size
NB: PCAs will have specified color, while fisher means will be white with specified color as the edgecolor
-fres FILE specify input pmag_results file from magic, sym and size
-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot
-S do not plot blanket treatment data (if this is set, you don't need the -LP)
-sym SYM SIZE, symbol, size for continuous points (e.g., ro 5, bs 10, g^ 10 for red dot, blue square, green triangle), default is blue dot at 5 pt
-D do not plot declination
-M do not plot magnetization
-log plot magnetization on a log scale
-L do not connect dots with a line
-I do not plot inclination
-d min max [in m] depth range to plot
-n normalize by weight in er_specimen table
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04, gts12]
-ds [mbsf,mcd] specify depth scale, mbsf default
-fmt [svg, eps, pdf, png] specify output format for plot (default: svg)
-sav save plot silently
DEFAULTS:
Measurements file: measurements.txt
Samples file: samples.txt
NRM step
Summary file: none
"""
args = sys.argv
if '-h' in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([ ['f', False, 'measurements.txt'], ['fsum', False, ''],
['fwig', False, ''], ['fsa', False, ''],
['fa', False, ''], ['fsp', False, ''],
['fres', False, '' ], ['fmt', False, 'svg'],
['LP', False, ''], ['n', False, False],
['d', False, '-1 -1'], ['ts', False, ''],
['WD', False, '.'], ['L', False, True],
['S', False, True], ['D', False, True],
['I', False, True], ['M', False, True],
['log', False, 0],
['ds', False, 'sample_core_depth'],
['sym', False, 'bo 5'], ['ID', False, '.'],
['sav', False, False], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol, input_dir, save, data_model_num = extractor.get_vars(
['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym', 'ID', 'sav', 'DM'], checked_args)
# format some variables
# format symbol/size
try:
sym, size = symbol.split()
size = int(size)
except:
print('you should provide -sym in this format: ro 5')
print('using defaults instead')
sym, size = 'ro', 5
# format result file, symbol, size
if res_file:
try:
res_file, res_sym, res_size = res_file.split()
except:
print('you must provide -fres in this format: -fres filename symbol size')
print(
'could not parse {}, defaulting to using no result file'.format(res_file))
res_file, res_sym, res_size = '', '', 0
else:
res_file, res_sym, res_size = '', '', 0
# format specimen file, symbol, size
if spc_file:
try:
spc_file, spc_sym, spc_size = spc_file.split()
except:
print('you must provide -fsp in this format: -fsp filename symbol size')
print(
'could not parse {}, defaulting to using no specimen file'.format(spc_file))
spc_file, spc_sym, spc_size = '', '', 0
else:
spc_file, spc_sym, spc_size = '', '', 0
# format min/max depth
try:
dmin, dmax = depth.split()
except:
print('you must provide -d in this format: -d dmin dmax')
print('could not parse {}, defaulting to plotting all depths'.format(depth))
dmin, dmax = -1, -1
# format timescale, min/max time
if timescale:<|fim▁hole|> timescale, amin, amax = timescale.split()
pltTime = True
except:
print(
'you must provide -ts in this format: -ts timescale minimum_age maximum_age')
print(
'could not parse {}, defaulting to using no timescale'.format(timescale))
timescale, amin, amax = None, -1, -1
pltTime = False
else:
timescale, amin, amax = None, -1, -1
pltTime = False
# format norm and wt_file
if norm and not isinstance(norm, bool):
wt_file = norm
norm = True
else:
norm = False
wt_file = ''
# format list of protcols and step
try:
method, step = meth.split()
except:
print(
'To use the -LP flag you must provide both the protocol and the step in this format:\n-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot')
print('Defaulting to using no protocol')
method, step = 'LT-NO', 0
# list of varnames
#['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym' ]
#meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol
fig, figname = ipmag.core_depthplot(input_dir, meas_file, spc_file, samp_file, age_file, sum_file, wt_file, depth_scale, dmin, dmax, sym, size,
spc_sym, spc_size, method, step, fmt, pltDec, pltInc, pltMag, pltLine, pltSus, logit, pltTime, timescale, amin, amax, norm, data_model_num)
if not pmagplotlib.isServer:
figname = figname.replace(':', '_')
if fig and save:
print('-I- Created plot: {}'.format(figname))
plt.savefig(figname)
return
app = wx.App(redirect=False)
if not fig:
pw.simple_warning(
'No plot was able to be created with the data you provided.\nMake sure you have given all the required information and try again')
return False
dpi = fig.get_dpi()
pixel_width = dpi * fig.get_figwidth()
pixel_height = dpi * fig.get_figheight()
figname = os.path.join(dir_path, figname)
plot_frame = pmag_menu_dialogs.PlotFrame((int(pixel_width), int(pixel_height + 50)),
fig, figname, standalone=True)
app.MainLoop()
if __name__ == "__main__":
main()<|fim▁end|>
|
try:
|
<|file_name|>cast_lossless.rs<|end_file_name|><|fim▁begin|>use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::in_constant;
use clippy_utils::source::snippet_opt;
use clippy_utils::ty::is_isize_or_usize;
use rustc_errors::Applicability;<|fim▁hole|>use rustc_hir::{Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use super::{utils, CAST_LOSSLESS};
/// Emits the `CAST_LOSSLESS` lint for `expr`, suggesting the lossless
/// `To::from(..)` conversion in place of the `as` cast.
///
/// `cast_op` is the operand being cast; `cast_from`/`cast_to` are the
/// source and target types. Does nothing when `should_lint` rejects the
/// cast (consts, pointer-sized ints, potentially lossy conversions).
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
    if !should_lint(cx, expr, cast_from, cast_to) {
        return;
    }
    // The suggestion is to use a function call, so if the original expression
    // has parens on the outside, they are no longer needed.
    let mut applicability = Applicability::MachineApplicable;
    let opt = snippet_opt(cx, cast_op.span);
    let sugg = opt.as_ref().map_or_else(
        || {
            // No source snippet available for the operand: fall back to a
            // `..` placeholder and downgrade the applicability accordingly.
            applicability = Applicability::HasPlaceholders;
            ".."
        },
        |snip| {
            if should_strip_parens(cast_op, snip) {
                // Drop the now-redundant outer parentheses.
                &snip[1..snip.len() - 1]
            } else {
                snip.as_str()
            }
        },
    );
    span_lint_and_sugg(
        cx,
        CAST_LOSSLESS,
        expr.span,
        &format!(
            "casting `{}` to `{}` may become silently lossy if you later change the type",
            cast_from, cast_to
        ),
        "try",
        format!("{}::from({})", cast_to, sugg),
        applicability,
    );
}
/// Decides whether a cast from `cast_from` to `cast_to` is a candidate
/// for the lossless-conversion suggestion.
fn should_lint(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) -> bool {
    // Do not suggest using From in consts/statics until it is valid to do so (see #2267).
    if in_constant(cx, expr.hir_id) {
        return false;
    }
    match (cast_from.is_integral(), cast_to.is_integral()) {
        // int -> int: lossless only when strictly widening, never for the
        // platform-dependent isize/usize, and never when a signed value
        // could be cast to an unsigned type.
        (true, true) => {
            if is_isize_or_usize(cast_from) || is_isize_or_usize(cast_to) {
                return false;
            }
            if cast_from.is_signed() && !cast_to.is_signed() {
                return false;
            }
            utils::int_ty_to_nbits(cast_from, cx.tcx) < utils::int_ty_to_nbits(cast_to, cx.tcx)
        },
        // int -> float: lossless when the integer is narrower than the
        // float's bit width (f32 => 32, anything else => 64).
        (true, false) => {
            let float_bits = match cast_to.kind() {
                ty::Float(FloatTy::F32) => 32,
                _ => 64,
            };
            utils::int_ty_to_nbits(cast_from, cx.tcx) < float_bits
        },
        // Remaining case of interest: widening f32 -> f64.
        (_, _) => {
            matches!(cast_from.kind(), ty::Float(FloatTy::F32))
                && matches!(cast_to.kind(), ty::Float(FloatTy::F64))
        },
    }
}
fn should_strip_parens(cast_expr: &Expr<'_>, snip: &str) -> bool {
if let ExprKind::Binary(_, _, _) = cast_expr.kind {
if snip.starts_with('(') && snip.ends_with(')') {
return true;
}
}
false
}<|fim▁end|>
| |
<|file_name|>AccountingTransaction.java<|end_file_name|><|fim▁begin|>/**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.domain.accounting;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import org.apache.commons.lang.StringUtils;
import org.fenixedu.academic.domain.Person;
import org.fenixedu.academic.domain.exceptions.DomainException;
import org.fenixedu.academic.domain.exceptions.DomainExceptionWithLabelFormatter;
import org.fenixedu.academic.domain.organizationalStructure.Party;
import org.fenixedu.academic.util.LabelFormatter;
import org.fenixedu.academic.util.Money;
import org.fenixedu.bennu.core.domain.Bennu;
import org.fenixedu.bennu.core.domain.User;
import org.fenixedu.bennu.core.security.Authenticate;
import org.fenixedu.bennu.core.signals.DomainObjectEvent;
import org.fenixedu.bennu.core.signals.Signal;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.YearMonthDay;
/**
* Two-ledged accounting transaction
*
* @author naat
*
*/
public class AccountingTransaction extends AccountingTransaction_Base {
public static final String SIGNAL_ANNUL = AccountingTransaction.class.getName() + ".annul";
public static Comparator<AccountingTransaction> COMPARATOR_BY_WHEN_REGISTERED =
(leftAccountingTransaction, rightAccountingTransaction) -> {
int comparationResult =
leftAccountingTransaction.getWhenRegistered().compareTo(rightAccountingTransaction.getWhenRegistered());
return (comparationResult == 0) ? leftAccountingTransaction.getExternalId().compareTo(
rightAccountingTransaction.getExternalId()) : comparationResult;
};
protected AccountingTransaction() {
super();
super.setRootDomainObject(Bennu.getInstance());
}
public AccountingTransaction(User responsibleUser, Event event, Entry debit, Entry credit,
AccountingTransactionDetail transactionDetail) {
this();
init(responsibleUser, event, debit, credit, transactionDetail);
}
private AccountingTransaction(User responsibleUser, Entry debit, Entry credit, AccountingTransactionDetail transactionDetail,
AccountingTransaction transactionToAdjust) {
this();
init(responsibleUser, transactionToAdjust.getEvent(), debit, credit, transactionDetail, transactionToAdjust);
}
protected void init(User responsibleUser, Event event, Entry debit, Entry credit,
AccountingTransactionDetail transactionDetail) {
init(responsibleUser, event, debit, credit, transactionDetail, null);
}
    /**
     * Initializes this transaction, optionally linking it to the transaction it adjusts.
     *
     * Uses the {@code super} setters directly because the public setters of this
     * class are overridden to throw {@link DomainException}, making the relations
     * immutable after construction.
     *
     * @param responsibleUser user responsible for registering the transaction
     * @param event the event this transaction pays (must not be null)
     * @param debit debit entry (must not be null)
     * @param credit credit entry (must not be null)
     * @param transactionDetail payment details; its processing date must not precede
     *        later operations already registered on the event
     * @param transactionToAdjust the transaction being adjusted, or null for a
     *        regular (non-adjustment) transaction
     * @throws DomainException if a required argument is null or if the event has
     *         operations registered after the detail's processing date
     */
    protected void init(User responsibleUser, Event event, Entry debit, Entry credit,
            AccountingTransactionDetail transactionDetail, AccountingTransaction transactionToAdjust) {
        checkParameters(event, debit, credit);
        // check for operations after registered date
        List<String> operationsAfter = event.getOperationsAfter(transactionDetail.getWhenProcessed());
        if (!operationsAfter.isEmpty()) {
            throw new DomainException("error.accounting.AccountingTransaction.cannot.create.transaction",
                    String.join(",", operationsAfter));
        }
        super.setEvent(event);
        super.setResponsibleUser(responsibleUser);
        super.addEntries(debit);
        super.addEntries(credit);
        super.setAdjustedTransaction(transactionToAdjust);
        super.setTransactionDetail(transactionDetail);
    }
    /**
     * Validates the mandatory constructor arguments.
     *
     * @throws DomainException if the event, debit entry or credit entry is null
     */
    private void checkParameters(Event event, Entry debit, Entry credit) {
        if (event == null) {
            throw new DomainException("error.accounting.accountingTransaction.event.cannot.be.null");
        }
        if (debit == null) {
            throw new DomainException("error.accounting.accountingTransaction.debit.cannot.be.null");
        }
        if (credit == null) {
            throw new DomainException("error.accounting.accountingTransaction.credit.cannot.be.null");
        }
    }
@Override
public void addEntries(Entry entries) {
throw new DomainException("error.accounting.accountingTransaction.cannot.add.entries");
}
@Override
public Set<Entry> getEntriesSet() {
return Collections.unmodifiableSet(super.getEntriesSet());
}
@Override
public void removeEntries(Entry entries) {
throw new DomainException("error.accounting.accountingTransaction.cannot.remove.entries");
}
@Override
public void setEvent(Event event) {
super.setEvent(event);
}
@Override
public void setResponsibleUser(User responsibleUser) {
throw new DomainException("error.accounting.accountingTransaction.cannot.modify.responsibleUser");
}
@Override
public void setAdjustedTransaction(AccountingTransaction adjustedTransaction) {
throw new DomainException("error.accounting.accountingTransaction.cannot.modify.adjustedTransaction");
}
@Override
public void setTransactionDetail(AccountingTransactionDetail transactionDetail) {
throw new DomainException("error.accounting.AccountingTransaction.cannot.modify.transactionDetail");
}
@Override
public void addAdjustmentTransactions(AccountingTransaction accountingTransaction) {
throw new DomainException(
"error.org.fenixedu.academic.domain.accounting.AccountingTransaction.cannot.add.accountingTransaction");
}
@Override
public Set<AccountingTransaction> getAdjustmentTransactionsSet() {
return Collections.unmodifiableSet(super.getAdjustmentTransactionsSet());
}
public Stream<AccountingTransaction> getAdjustmentTransactionStream() {
return super.getAdjustmentTransactionsSet().stream();
}
@Override
public void removeAdjustmentTransactions(AccountingTransaction adjustmentTransactions) {
throw new DomainException(
"error.org.fenixedu.academic.domain.accounting.AccountingTransaction.cannot.remove.accountingTransaction");
}
public LabelFormatter getDescriptionForEntryType(EntryType entryType) {
return getEvent().getDescriptionForEntryType(entryType);
}
public Account getFromAccount() {
return getEntry(false).getAccount();
}
public Account getToAccount() {
return getEntry(true).getAccount();
}
public Entry getToAccountEntry() {
return getEntry(true);
}
public Entry getFromAccountEntry() {
return getEntry(false);
}
private Entry getEntry(boolean positive) {
for (final Entry entry : super.getEntriesSet()) {<|fim▁hole|> }
throw new DomainException("error.accounting.accountingTransaction.transaction.data.is.corrupted");
}
public AccountingTransaction reimburse(User responsibleUser, PaymentMethod paymentMethod,String
paymentReference, Money amountToReimburse) {
return reimburse(responsibleUser, paymentMethod,paymentReference, amountToReimburse, null);
}
public AccountingTransaction reimburse(User responsibleUser, PaymentMethod paymentMethod, String
paymentReference, Money amountToReimburse, String comments) {
return reimburse(responsibleUser, paymentMethod, paymentReference, amountToReimburse, comments, true);
}
public AccountingTransaction reimburse(User responsibleUser, PaymentMethod paymentMethod, String
paymentReference, Money amountToReimburse,
DateTime reimburseDate, String comments) {
return reimburse(responsibleUser, paymentMethod, paymentReference, amountToReimburse, comments, true, reimburseDate);
}
public AccountingTransaction reimburseWithoutRules(User responsibleUser, PaymentMethod paymentMethod, String
paymentReference, Money amountToReimburse) {
return reimburseWithoutRules(responsibleUser, paymentMethod, paymentReference, amountToReimburse, null);
}
public AccountingTransaction reimburseWithoutRules(User responsibleUser, PaymentMethod paymentMethod, String
paymentReference, Money amountToReimburse,
String comments) {
return reimburse(responsibleUser, paymentMethod, paymentReference, amountToReimburse, comments, false);
}
    /**
     * Annuls this transaction: annuls its active receipts, emits the
     * {@link #SIGNAL_ANNUL} signal and registers a compensating reimbursement
     * for the full adjusted amount, reusing the original payment method and
     * reference.
     *
     * @param responsibleUser user performing the annulment
     * @param reason mandatory justification, stored as the reimbursement comment
     * @throws DomainException if the reason is empty or if the event has
     *         operations registered after this transaction was processed
     */
    public void annul(final User responsibleUser, final String reason) {
        if (StringUtils.isEmpty(reason)) {
            throw new DomainException(
                    "error.org.fenixedu.academic.domain.accounting.AccountingTransaction.cannot.annul.without.reason");
        }
        checkRulesToAnnul();
        annulReceipts();
        Signal.emit(SIGNAL_ANNUL, new DomainObjectEvent<AccountingTransaction>(this));
        reimburseWithoutRules(responsibleUser, getTransactionDetail().getPaymentMethod(), getTransactionDetail()
                .getPaymentReference(), getAmountWithAdjustment(), reason);
    }
    /**
     * Ensures the transaction can still be annulled: no operations may have
     * been registered on the event after this transaction was processed.
     *
     * @throws DomainException listing the conflicting operations, if any exist
     */
    private void checkRulesToAnnul() {
        final List<String> operationsAfter = getEvent().getOperationsAfter(getWhenProcessed());
        if (!operationsAfter.isEmpty()) {
            throw new DomainException("error.accounting.AccountingTransaction.cannot.annul.operations.after",
                    String.join(",", operationsAfter));
        }
    }
    /**
     * Annuls every still-active receipt attached to the destination entry,
     * recording the currently authenticated person (if any) as responsible.
     */
    private void annulReceipts() {
        getToAccountEntry().getReceiptsSet().stream().filter(Receipt::isActive).forEach(r -> {
            // May be null when no user is authenticated (e.g. batch processes).
            Person responsible = Optional.ofNullable(Authenticate.getUser()).map(User::getPerson).orElse(null);
            r.annul(responsible);
        });
    }
private AccountingTransaction reimburse(User responsibleUser, PaymentMethod paymentMethod, String paymentReference, Money
amountToReimburse,
String comments, boolean checkRules) {
return reimburse(responsibleUser, paymentMethod, paymentReference, amountToReimburse, comments, checkRules, new DateTime
());
}
    /**
     * Creates the adjustment transaction that reimburses (part of) this one.
     *
     * Two ADJUSTMENT entries are created: the negated amount against the original
     * destination account and the positive amount back to the origin account. The
     * new transaction is linked to this one as its adjusted transaction, and the
     * event state is recalculated afterwards.
     *
     * @param checkRules when true, also validates the reimbursement against the
     *        event rules via {@link #canApplyReimbursement(Money)}
     * @throws DomainException if the event rules reject the reimbursement
     * @throws DomainExceptionWithLabelFormatter if the amount exceeds the
     *         destination entry's (adjusted) amount
     * @return the newly created adjustment transaction
     */
    private AccountingTransaction reimburse(User responsibleUser, PaymentMethod paymentMethod, String paymentReference,
            Money amountToReimburse, String comments, boolean checkRules, DateTime reimburseDate) {
        if (checkRules && !canApplyReimbursement(amountToReimburse)) {
            throw new DomainException("error.accounting.AccountingTransaction.cannot.reimburse.events.that.may.open");
        }
        if (!getToAccountEntry().canApplyReimbursement(amountToReimburse)) {
            throw new DomainExceptionWithLabelFormatter(
                    "error.accounting.AccountingTransaction.amount.to.reimburse.exceeds.entry.amount", getToAccountEntry()
                            .getDescription());
        }
        final AccountingTransaction transaction =
                new AccountingTransaction(responsibleUser, new Entry(EntryType.ADJUSTMENT, amountToReimburse.negate(),
                        getToAccount()), new Entry(EntryType.ADJUSTMENT, amountToReimburse, getFromAccount()),
                        new AccountingTransactionDetail(reimburseDate, paymentMethod, paymentReference, comments), this);
        getEvent().recalculateState(new DateTime());
        return transaction;
    }
public DateTime getWhenRegistered() {
return getTransactionDetail().getWhenRegistered();
}
public DateTime getWhenProcessed() {
return getTransactionDetail().getWhenProcessed();
}
public String getComments() {
return getTransactionDetail().getComments();
}
public boolean isPayed(final int civilYear) {
return getWhenRegistered().getYear() == civilYear;
}
public boolean isAdjustingTransaction() {
return getAdjustedTransaction() != null;
}
public boolean hasBeenAdjusted() {
return !super.getAdjustmentTransactionsSet().isEmpty();
}
public Entry getEntryFor(final Account account) {
for (final Entry accountingEntry : super.getEntriesSet()) {
if (accountingEntry.getAccount() == account) {
return accountingEntry;
}
}
throw new DomainException(
"error.accounting.accountingTransaction.transaction.data.is.corrupted.because.no.entry.belongs.to.account");
}
private boolean canApplyReimbursement(final Money amount) {
return getEvent().canApplyReimbursement(amount);
}
public boolean isSourceAccountFromParty(Party party) {
return getFromAccount().getParty() == party;
}
@Override
protected void checkForDeletionBlockers(Collection<String> blockers) {
super.checkForDeletionBlockers(blockers);
blockers.addAll(getEvent().getOperationsAfter(getWhenProcessed()));
}
    /**
     * Deletes this transaction together with its adjustment transactions,
     * transaction detail and entries, after verifying there are no deletion
     * blockers (operations registered after this one).
     *
     * @throws DomainException if deletion is blocked
     */
    public void delete() {
        DomainException.throwWhenDeleteBlocked(getDeletionBlockers());
        super.setAdjustedTransaction(null);
        // Drain the set by repeatedly deleting the first element: each delete()
        // detaches the object from the relation, so iterating a mutating
        // collection is avoided and the loop terminates when the set is empty.
        for (; !getAdjustmentTransactionsSet().isEmpty(); getAdjustmentTransactionsSet().iterator().next().delete()) {
            ;
        }
        if (getTransactionDetail() != null) {
            getTransactionDetail().delete();
        }
        // Same drain-until-empty pattern for the entries.
        for (; !getEntriesSet().isEmpty(); getEntriesSet().iterator().next().delete()) {
            ;
        }
        super.setResponsibleUser(null);
        super.setEvent(null);
        setRootDomainObject(null);
        super.deleteDomainObject();
    }
public Money getAmountWithAdjustment() {
return getToAccountEntry().getAmountWithAdjustment();
}
public boolean isInsidePeriod(final YearMonthDay startDate, final YearMonthDay endDate) {
return isInsidePeriod(startDate.toLocalDate(), endDate.toLocalDate());
}
public boolean isInsidePeriod(final LocalDate startDate, final LocalDate endDate) {
return !getWhenRegistered().toLocalDate().isBefore(startDate) && !getWhenRegistered().toLocalDate().isAfter(endDate);
}
public boolean isInstallment() {
return false;
}
public PaymentMethod getPaymentMethod() {
return getTransactionDetail().getPaymentMethod();
}
public Money getOriginalAmount() {
return getToAccountEntry().getOriginalAmount();
}
}<|fim▁end|>
|
if (entry.isPositiveAmount() == positive) {
return entry;
}
|
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import React, {useState} from 'react';
import {
ScrollView,
StyleSheet,
Text,
TouchableOpacity,
Platform,
Linking,
} from 'react-native';
import AutoHeightWebView from 'react-native-autoheight-webview';
import {
autoHeightHtml0,
autoHeightHtml1,
autoHeightScript,
autoWidthHtml0,
autoWidthHtml1,
autoWidthScript,
autoDetectLinkScript,
style0,
inlineBodyStyle,
} from './config';
// Permit every navigation request, logging it first for debugging.
const onShouldStartLoadWithRequest = request => {
  console.log(request);
  return true;
};
const onError = ({nativeEvent}) =>
console.error('WebView error: ', nativeEvent);
const onMessage = event => {
const {data} = event.nativeEvent;
let messageData;
// maybe parse stringified JSON
try {
messageData = JSON.parse(data);
} catch (e) {
console.log(e.message);
}
if (typeof messageData === 'object') {
const {url} = messageData;
// check if this message concerns us
if (url && url.startsWith('http')) {
Linking.openURL(url).catch(error =>
console.error('An error occurred', error),
);
}<|fim▁hole|> }
};
const onHeightLoadStart = () => console.log('height on load start');
const onHeightLoad = () => console.log('height on load');
const onHeightLoadEnd = () => console.log('height on load end');
const onWidthLoadStart = () => console.log('width on load start');
const onWidthLoad = () => console.log('width on load');
const onWidthLoadEnd = () => console.log('width on load end');
const Explorer = () => {
const [{widthHtml, heightHtml}, setHtml] = useState({
widthHtml: autoWidthHtml0,
heightHtml: autoHeightHtml0,
});
const changeSource = () =>
setHtml({
widthHtml: widthHtml === autoWidthHtml0 ? autoWidthHtml1 : autoWidthHtml0,
heightHtml:
heightHtml === autoHeightHtml0 ? autoHeightHtml1 : autoHeightHtml0,
});
const [{widthStyle, heightStyle}, setStyle] = useState({
heightStyle: null,
widthStyle: inlineBodyStyle,
});
const changeStyle = () =>
setStyle({
widthStyle:
widthStyle === inlineBodyStyle
? style0 + inlineBodyStyle
: inlineBodyStyle,
heightStyle: heightStyle === null ? style0 : null,
});
const [{widthScript, heightScript}, setScript] = useState({
heightScript: autoDetectLinkScript,
widthScript: null,
});
const changeScript = () =>
setScript({
widthScript: widthScript == autoWidthScript ? autoWidthScript : null,
heightScript:
heightScript !== autoDetectLinkScript
? autoDetectLinkScript
: autoHeightScript + autoDetectLinkScript,
});
const [heightSize, setHeightSize] = useState({height: 0, width: 0});
const [widthSize, setWidthSize] = useState({height: 0, width: 0});
return (
<ScrollView
style={{
paddingTop: 45,
backgroundColor: 'lightyellow',
}}
contentContainerStyle={{
justifyContent: 'center',
alignItems: 'center',
}}>
<AutoHeightWebView
customStyle={heightStyle}
onError={onError}
onLoad={onHeightLoad}
onLoadStart={onHeightLoadStart}
onLoadEnd={onHeightLoadEnd}
onShouldStartLoadWithRequest={onShouldStartLoadWithRequest}
onSizeUpdated={setHeightSize}
source={{html: heightHtml}}
customScript={heightScript}
onMessage={onMessage}
/>
<Text style={{padding: 5}}>
height: {heightSize.height}, width: {heightSize.width}
</Text>
<AutoHeightWebView
style={{
marginTop: 15,
}}
customStyle={widthStyle}
onError={onError}
onLoad={onWidthLoad}
onLoadStart={onWidthLoadStart}
onLoadEnd={onWidthLoadEnd}
onShouldStartLoadWithRequest={onShouldStartLoadWithRequest}
onSizeUpdated={setWidthSize}
allowFileAccessFromFileURLs={true}
allowUniversalAccessFromFileURLs={true}
source={{
html: widthHtml,
baseUrl:
Platform.OS === 'android' ? 'file:///android_asset/' : 'web/',
}}
customScript={widthScript}
/>
<Text style={{padding: 5}}>
height: {widthSize.height}, width: {widthSize.width}
</Text>
<TouchableOpacity onPress={changeSource} style={styles.button}>
<Text>change source</Text>
</TouchableOpacity>
<TouchableOpacity onPress={changeStyle} style={styles.button}>
<Text>change style</Text>
</TouchableOpacity>
<TouchableOpacity
onPress={changeScript}
style={[styles.button, {marginBottom: 100}]}>
<Text>change script</Text>
</TouchableOpacity>
</ScrollView>
);
};
// Shared appearance for the three toggle buttons rendered by Explorer.
const styles = StyleSheet.create({
  button: {
    marginTop: 15,
    backgroundColor: 'aliceblue',
    borderRadius: 5,
    padding: 5,
  },
});
export default Explorer;<|fim▁end|>
| |
<|file_name|>mut-function-arguments.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Pre-1.0 Rust: `~int` is an owned (heap) pointer. A `mut` by-value
// parameter makes `y` locally mutable, so the pointee can be reassigned.
fn f(mut y: ~int) {
    *y = 5;
    assert!(*y == 5);
}
// Closure parameters (pre-1.0 `&fn` closure type) may also be declared
// `mut`, allowing the closure body to mutate its owned argument.
fn g() {
    let frob: &fn(~int) = |mut q| { *q = 2; assert!(*q == 2); };
    let w = ~37;
    frob(w);
}
pub fn main() {
let z = ~17;<|fim▁hole|> g();
}<|fim▁end|>
|
f(z);
|
<|file_name|>test_pickle.py<|end_file_name|><|fim▁begin|>import io
import os
import pickle
import sys
import tempfile
import unittest
import numpy as np
import pystan
from pystan.tests.helper import get_model
from pystan.experimental import unpickle_fit
class TestPickle(unittest.TestCase):
    """Round-trip pickling of compiled StanModel instances and their fits."""

    @classmethod
    def setUpClass(cls):
        # Scratch pickle path and a trivial standard-normal model shared by
        # all tests in this class.
        cls.pickle_file = os.path.join(tempfile.mkdtemp(), 'stanmodel.pkl')
        cls.model_code = 'parameters {real y;} model {y ~ normal(0,1);}'

    def test_pickle_model(self):
        """A model survives pickling even after its extension module is unloaded."""
        pickle_file = self.pickle_file
        model_code = self.model_code
        m = pystan.StanModel(model_code=model_code, model_name="normal2")
        module_name = m.module.__name__
        module_filename = m.module.__file__
        with open(pickle_file, 'wb') as f:
            pickle.dump(m, f)
        del m
        # Unload the compiled extension module to force the unpickled model
        # to rebuild/relocate it.
        del sys.modules[module_name]
        with open(pickle_file, 'rb') as f:
            m = pickle.load(f)
        self.assertTrue(m.model_name.startswith("normal2"))
        self.assertIsNotNone(m.module)
        if not sys.platform.startswith('win'):
            # will fail on Windows
            self.assertNotEqual(module_filename, m.module.__file__)
        fit = m.sampling()
        y = fit.extract()['y']
        assert len(y) == 4000

    def test_pickle_fit(self):
        """A fit unpickles intact after its model's module has been unloaded."""
        model_code = 'parameters {real y;} model {y ~ normal(0,1);}'
        sm = pystan.StanModel(model_code=model_code, model_name="normal1")
        # additional error checking
        fit = sm.sampling(iter=100)
        y = fit.extract()['y'].copy()
        self.assertIsNotNone(y)
        # pickle
        pickled_model = pickle.dumps(sm)
        module_name = sm.module.__name__
        del sm
        pickled_fit = pickle.dumps(fit)
        del fit
        # unload module
        if module_name in sys.modules:
            del(sys.modules[module_name])
        # load from file
        sm_from_pickle = pickle.loads(pickled_model)
        fit_from_pickle = pickle.loads(pickled_fit)
        self.assertIsNotNone(fit_from_pickle)
        self.assertTrue((fit_from_pickle.extract()['y'] == y).all())

    def test_pickle_model_and_reload(self):
        """The same model can be pickled to, and reloaded from, two files."""
        pickle_file = self.pickle_file
        pickle_file2 = os.path.join(tempfile.mkdtemp(), 'stanmodel.pkl')
        model_code = self.model_code
        model = pystan.StanModel(model_code=model_code, model_name="normal1")
        with open(pickle_file, 'wb') as f:
            pickle.dump(model, f)
        with open(pickle_file2, 'wb') as f:
            pickle.dump(model, f)
        del model
        with open(pickle_file, 'rb') as f:
            model_from_pickle = pickle.load(f)
        self.assertIsNotNone(model_from_pickle.sampling(iter=100).extract())
        with open(pickle_file2, 'rb') as f:
            model_from_pickle = pickle.load(f)
        self.assertIsNotNone(model_from_pickle.sampling(iter=100).extract())

    def test_model_unique_names(self):
        """Two models compiled from identical code get distinct module names."""
        model_code = self.model_code
        model1 = pystan.StanModel(model_code=model_code, model_name="normal1")
        model2 = pystan.StanModel(model_code=model_code, model_name="normal1")
        self.assertNotEqual(model1.module_name, model2.module_name)
class TestPickleFitOnly(unittest.TestCase):
@classmethod
def setUpClass(cls):
model_code = 'parameters {real y;} model {y ~ normal(0,1);}'
model = get_model("standard_normal_model",
model_code, model_name="normal1",
verbose=True, obfuscate_model_name=False)
fit = model.sampling()
tempfolder = tempfile.mkdtemp()
cls.pickle_fit = os.path.join(tempfolder, 'stanfit.pkl')
cls.pickle_extract = os.path.join(tempfolder, 'stanextract.pkl')
with io.open(cls.pickle_fit, mode="wb") as f:
pickle.dump(fit, f)
with io.open(cls.pickle_extract, mode="wb") as f:
pickle.dump(fit.extract(), f)
module_name = model.module.__name__
del model
del sys.modules[module_name]
@unittest.expectedFailure
def test_unpickle_fit_fail(self):
with io.open(self.pickle_file, "rb") as f:
pickle.load(f)<|fim▁hole|>
def test_load_fit(self):
fit, model = unpickle_fit(self.pickle_fit, open_func=io.open, open_kwargs={"mode" : "rb"}, return_model=True)
self.assertIsNotNone(fit)
self.assertIsNotNone(model)
self.assertIsNotNone(fit.extract())
self.assertTrue("y" in fit.extract())
with io.open(self.pickle_extract, "rb") as f:
extract = pickle.load(f)
self.assertTrue(np.all(fit.extract()["y"] == extract["y"]))<|fim▁end|>
| |
<|file_name|>completionForStringLiteral10.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts'/>
<|fim▁hole|>////type As = 'arf' | 'abacus' | 'abaddon';
////let a: As;
////if ('/**/' != a
verify.completions({ marker: "", exact: ["arf", "abacus", "abaddon"] });<|fim▁end|>
| |
<|file_name|>boilerplate.js<|end_file_name|><|fim▁begin|>// I am well aware this could be so very much tidier,
// we are going fo basic functionality first.
// will tidy up later.
// Boilerplate UI helpers. Instances expose popup(html), which fills the
// #popup element with the given markup, adds a close button and shows the
// modal overlay.
var boilerplate = function() {
    this.popup = function(html) {
        var popupEl = $('#popup');
        popupEl.html(html);
        // add X
        var close = '<img id="close" src="libs/icons/close.png"></img>';
        popupEl.append(close);
        $('#close').on('click', function() {
            $('#magic_positioning_table').hide();
            $('#overlay').hide();
        });
        // add overlay
        $('#overlay').show();
        $('#magic_positioning_table').show();
    };
    // for ajax popup use: $('#popup').load(URL, function(html) { popup(html); });
};
$(document).ready(function() {
// add in hamburger menu to open aside if aside is present
if ($('#aside').length>0) {
var hamburger = '<img id="hamburger" src="libs/icons/hamburger.png"></img>';
$('#header').append(hamburger);
$('#hamburger').on('click', function() {
if (!$('#aside').is(':visible')) {
$('#aside').show();
$('#header, #footer, #main').transition({ 'left': $('#aside').width() }, 300);
} else {
$('#header, #footer, #main').transition({ 'left': '0' }, 300, function() {
$('#aside').hide(); // in case there is no BG color on the main area's
});
}
});
// also swipe right to open
$$('#main').swipeRight(function(){
if (!$('#aside').is(':visible')) {
$('#aside').show();<|fim▁hole|> }
});
$$('#main, #aside').swipeLeft(function() {
if ($('#aside').is(':visible')) {
$('#header, #footer, #main').transition({ 'left': '0' }, 300, function() {
$('#aside').hide(); // in case there is no BG color on the main area's
});
}
});
}
// add in the modal popup div (and overlay)
var popup = '<div id="popup" class="white"></div>';
var overlay = '<div id="overlay"></div>';
var magic_positioning_table = '<table id="magic_positioning_table">'+
'<tr><td colspan="4"></td></tr>' +
'<tr><td></td><td class="popupcell"></td><td></td></tr>' +
'<tr><td colspan="4"></td></tr>' +
'</table>';
$('body').append(popup);
$('body').append(overlay);
$('body').append(magic_positioning_table);
$('#popup').appendTo('#magic_positioning_table td.popupcell');
// apply the landscape-fullscreen class - removal of which
// allows turning off the full screen on rotate behaviour
// HAVE TURNED THIS OFF - CONFUSING AND ALSO CAUSES ISSUE WHEN NO ASSIDE - BUT A NEAT IDEA STILL
//$('#header, #footer, #main, #aside, #popup').addClass('landscape-fullscreen');
});<|fim▁end|>
|
$('#header, #footer, #main').transition({ 'left': $('#aside').width() }, 300);
|
<|file_name|>test_v10.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import oslo_messaging
import six
import testtools
from sahara import conductor as cond
from sahara import context
from sahara import exceptions as exc
from sahara.plugins import base as pl_base
from sahara.plugins import provisioning as pr_base
from sahara.service import api as service_api
from sahara.service.api import v10 as api
from sahara.tests.unit import base
from sahara.utils import cluster as c_u
conductor = cond.API
SAMPLE_CLUSTER = {
'plugin_name': 'fake',
'hadoop_version': 'test_version',
'tenant_id': 'tenant_1',
'name': 'test_cluster',
'user_keypair_id': 'my_keypair',
'node_groups': [
{
'auto_security_group': True,
'name': 'ng_1',
'flavor_id': '42',
'node_processes': ['p1', 'p2'],
'count': 1
},
{
'auto_security_group': False,
'name': 'ng_2',
'flavor_id': '42',
'node_processes': ['p3', 'p4'],
'count': 3
},
{
'auto_security_group': False,
'name': 'ng_3',
'flavor_id': '42',
'node_processes': ['p3', 'p4'],
'count': 1
}
],
'cluster_configs': {
'service_1': {
'config_2': 'value_2'
},
'service_2': {
'config_1': 'value_1'
}
},
}
SCALE_DATA = {
'resize_node_groups': [
{
'name': 'ng_1',
'count': 3,
},
{
'name': 'ng_2',
'count': 2,
}
],
'add_node_groups': [
{
'auto_security_group': True,
'name': 'ng_4',
'flavor_id': '42',
'node_processes': ['p1', 'p2'],
'count': 1
},
]
}
class FakePlugin(pr_base.ProvisioningPluginBase):
_info = {}
name = "fake"
def __init__(self, calls_order):
self.calls_order = calls_order
def configure_cluster(self, cluster):
pass
def start_cluster(self, cluster):
pass
def get_description(self):
return "Some description"
def get_title(self):
return "Fake plugin"
def validate(self, cluster):
self.calls_order.append('validate')
def get_open_ports(self, node_group):
self.calls_order.append('get_open_ports')
def validate_scaling(self, cluster, to_be_enlarged, additional):
self.calls_order.append('validate_scaling')
def get_versions(self):
return ['0.1', '0.2']
def get_node_processes(self, version):
return {'HDFS': ['namenode', 'datanode']}
def get_configs(self, version):
return []
def recommend_configs(self, cluster, scaling=False):
self.calls_order.append('recommend_configs')
class FakePluginManager(pl_base.PluginManager):
def __init__(self, calls_order):
super(FakePluginManager, self).__init__()
self.plugins['fake'] = FakePlugin(calls_order)
class FakeOps(object):
def __init__(self, calls_order):
self.calls_order = calls_order
def provision_cluster(self, id):
self.calls_order.append('ops.provision_cluster')
conductor.cluster_update(
context.ctx(), id, {'status': c_u.CLUSTER_STATUS_ACTIVE})
def provision_scaled_cluster(self, id, to_be_enlarged):
self.calls_order.append('ops.provision_scaled_cluster')
# Set scaled to see difference between active and scaled
for (ng, count) in six.iteritems(to_be_enlarged):
conductor.node_group_update(context.ctx(), ng, {'count': count})
conductor.cluster_update(context.ctx(), id, {'status': 'Scaled'})
def terminate_cluster(self, id):
self.calls_order.append('ops.terminate_cluster')
class TestApi(base.SaharaWithDbTestCase):
def setUp(self):
super(TestApi, self).setUp()
self.calls_order = []
self.override_config('plugins', ['fake'])
pl_base.PLUGINS = FakePluginManager(self.calls_order)
service_api.setup_api(FakeOps(self.calls_order))
oslo_messaging.notify.notifier.Notifier.info = mock.Mock()
self.ctx = context.ctx()
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
def test_create_cluster_success(self, check_cluster):
cluster = api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual(1, check_cluster.call_count)
result_cluster = api.get_cluster(cluster.id)
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster.status)
expected_count = {
'ng_1': 1,
'ng_2': 3,
'ng_3': 1,
}
ng_count = 0
for ng in result_cluster.node_groups:
self.assertEqual(expected_count[ng.name], ng.count)
ng_count += 1
self.assertEqual(3, ng_count)
api.terminate_cluster(result_cluster.id)
self.assertEqual(
['get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'ops.terminate_cluster'], self.calls_order)
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
def test_create_multiple_clusters_success(self, check_cluster):
MULTIPLE_CLUSTERS = SAMPLE_CLUSTER.copy()
MULTIPLE_CLUSTERS['count'] = 2
clusters = api.create_multiple_clusters(MULTIPLE_CLUSTERS)
self.assertEqual(2, check_cluster.call_count)
result_cluster1 = api.get_cluster(clusters['clusters'][0])
result_cluster2 = api.get_cluster(clusters['clusters'][1])
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster1.status)
self.assertEqual(c_u.CLUSTER_STATUS_ACTIVE, result_cluster2.status)
expected_count = {
'ng_1': 1,
'ng_2': 3,
'ng_3': 1,
}
ng_count = 0
for ng in result_cluster1.node_groups:
self.assertEqual(expected_count[ng.name], ng.count)
ng_count += 1
self.assertEqual(3, ng_count)
api.terminate_cluster(result_cluster1.id)
api.terminate_cluster(result_cluster2.id)
self.assertEqual(
['get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster',
'ops.terminate_cluster',
'ops.terminate_cluster'], self.calls_order)
@mock.patch('sahara.service.quotas.check_cluster')
def test_create_multiple_clusters_failed(self, check_cluster):
MULTIPLE_CLUSTERS = SAMPLE_CLUSTER.copy()
MULTIPLE_CLUSTERS['count'] = 2
check_cluster.side_effect = exc.QuotaException(
'resource', 'requested', 'available')
with testtools.ExpectedException(exc.QuotaException):
api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual('Error', api.get_clusters()[0].status)
@mock.patch('sahara.service.quotas.check_cluster')
def test_create_cluster_failed(self, check_cluster):
check_cluster.side_effect = exc.QuotaException(
'resource', 'requested', 'available')
with testtools.ExpectedException(exc.QuotaException):
api.create_cluster(SAMPLE_CLUSTER)
self.assertEqual('Error', api.get_clusters()[0].status)
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
@mock.patch('sahara.service.quotas.check_scaling', return_value=None)
def test_scale_cluster_success(self, check_scaling, check_cluster):
cluster = api.create_cluster(SAMPLE_CLUSTER)
api.scale_cluster(cluster.id, SCALE_DATA)
result_cluster = api.get_cluster(cluster.id)
self.assertEqual('Scaled', result_cluster.status)
expected_count = {
'ng_1': 3,
'ng_2': 2,
'ng_3': 1,
'ng_4': 1,
}
ng_count = 0
for ng in result_cluster.node_groups:
self.assertEqual(expected_count[ng.name], ng.count)
ng_count += 1
self.assertEqual(4, ng_count)
api.terminate_cluster(result_cluster.id)
self.assertEqual(
['get_open_ports', 'recommend_configs', 'validate',
'ops.provision_cluster', 'get_open_ports', 'get_open_ports',
'recommend_configs', 'validate_scaling',
'ops.provision_scaled_cluster',
'ops.terminate_cluster'], self.calls_order)
@mock.patch('sahara.service.quotas.check_cluster', return_value=None)
@mock.patch('sahara.service.quotas.check_scaling', return_value=None)
def test_scale_cluster_failed(self, check_scaling, check_cluster):
cluster = api.create_cluster(SAMPLE_CLUSTER)
check_scaling.side_effect = exc.QuotaException(
'resource', 'requested', 'available')
with testtools.ExpectedException(exc.QuotaException):
api.scale_cluster(cluster.id, {})
def test_cluster_update(self):
with mock.patch('sahara.service.quotas.check_cluster'):
cluster = api.create_cluster(SAMPLE_CLUSTER)
updated_cluster = api.update_cluster(
cluster.id, {'description': 'Cluster'})
self.assertEqual('Cluster', updated_cluster.description)
def test_get_plugin(self):
# processing to dict
data = api.get_plugin('fake', '0.1').dict
self.assertIsNotNone(data)
self.assertEqual(
len(pr_base.list_of_common_configs()), len(data.get('configs')))
self.assertEqual(['fake', '0.1'], data.get('required_image_tags'))
self.assertEqual(
{'HDFS': ['namenode', 'datanode']}, data.get('node_processes'))
self.assertIsNone(api.get_plugin('fake', '0.3'))
data = api.get_plugin('fake').dict
self.assertIsNotNone(data.get('version_labels'))
self.assertIsNotNone(data.get('plugin_labels'))
del data['plugin_labels']
del data['version_labels']
self.assertEqual({
'description': "Some description",
'name': 'fake',
'title': 'Fake plugin',
'versions': ['0.1', '0.2']}, data)
self.assertIsNone(api.get_plugin('name1', '0.1'))
def test_update_plugin(self):
data = api.get_plugin('fake', '0.1').dict
self.assertIsNotNone(data)
updated = api.update_plugin('fake', values={
'plugin_labels': {'enabled': {'status': False}}}).dict
self.assertFalse(updated['plugin_labels']['enabled']['status'])
updated = api.update_plugin('fake', values={
'plugin_labels': {'enabled': {'status': True}}}).dict
self.assertTrue(updated['plugin_labels']['enabled']['status'])
# restore to original status
updated = api.update_plugin('fake', values={
'plugin_labels': data['plugin_labels']}).dict
self.assertEqual(data['plugin_labels']['enabled']['status'],
updated['plugin_labels']['enabled']['status'])<|fim▁end|>
|
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
|
<|file_name|>build_dataset.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import csv
import pandas
import os
import sys<|fim▁hole|>ticker_reader = csv.reader(ticker_f)
tickers = [r[0] for r in ticker_reader][1:]
ticker_f.close()
tln = len(tickers)
t_1 = datetime.now()
# build full data frame
res = None
for i, t in enumerate(tickers):
t_n = t.split("/")[1]
df = pandas.io.parsers.read_csv("%s.csv" % t_n)
df[t_n] = (df["Close"].shift(1) - df["Close"]) / df["Close"]
df = df[["Date", t_n]]
df.set_index("Date")
if res is None:
res = df
else:
res = res.merge(df, on="Date", how="outer")
print i, i * 100. / tln, datetime.now() - t_1
res = res.dropna(axis=0, int(sys.argv[3])) # drop many missing obs
res = res.dropna(axis=1, int(sys.argv[4])) # drop many missing vars
res = res.dropna()
res.to_csv(sys.argv[5])<|fim▁end|>
|
os.chdir(sys.argv[1])
ticker_f = open(sys.argv[2], "rb")
|
<|file_name|>TemplateType.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2012 Matthias Klass, Johannes Leimer,
* Rico Lieback, Sebastian Gabriel, Lothar Gesslein,
* Alexander Rampp, Kai Weidner
*
* This file is part of the Physalix Enrollment System
*
* Foobar is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Foobar is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Foobar. If not, see <http://www.gnu.org/licenses/>.
*/
package hsa.awp.common.model;
public enum TemplateType {
DRAWN("Losprozedur", "drawMail.vm"),
DRAWN_NO_LUCK("Losprozedur - nichts Zugelost", "noLuckMail.vm"),
FIFO("Fifo-Prozedur", "fifoMail.vm");
private String desc;
private String fileName;
<|fim▁hole|> this.fileName = fileName;
}
public String getDesc() {
return desc;
}
public String getFileName() {
return fileName;
}
}<|fim▁end|>
|
private TemplateType(String desc, String fileName) {
this.desc = desc;
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2010 Eduardo Robles Elvira <edulix AT gmail DOT com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.models import User, UserManager
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.db.models import signals, Avg, Q
from datetime import date
import os
from django.conf import settings
def create_profile_for_user(sender, **kwargs):
'''
This way everytime a User is created, a Profile is created too.
'''
if kwargs['created']:
profile = Profile()
if not kwargs['instance'].__dict__.has_key("birth_date"):
profile.birth_date = date.today()
if not kwargs['instance'].__dict__.has_key("address"):
profile.address = _("address")
profile.__dict__.update(kwargs['instance'].__dict__)
profile.save()
#signals.post_save.connect(create_profile_for_user, sender=User)
class Profile(User):
'''
<<<<<<< HEAD
User with timebank settings.
=======
User with time bank settings.
>>>>>>> 2db144ba2c6c34a8f17f795a1186a524059b1aa6
'''
photo = models.ImageField(_("Avatar"), blank=True, null=True,
upload_to=os.path.join(settings.STATIC_DOC_ROOT, "photos"))
<<<<<<< HEAD
birth_date = models.DateField(_("Rojstni datum"), default=date.today())
address = models.CharField(_("Naslov"), max_length=100, default=_("address"))
org_name = models.CharField(_("Ime organizacije"), max_length=30, default=_("org_name"))
first_name1 = models.CharField(_("Ime zastopnika"), max_length=30, default=_("first_name"))
last_name1 = models.CharField(_("Priimek zastopnika"), max_length=30, default=_("last_name"))
email1 = models.CharField(_("E-mail zastopnika"), max_length=30, default=_("email"))
# credits in minutes
balance = models.IntegerField(default=600)
=======
birth_date = models.DateField(_("Birth date"), default=date.today())
address = models.CharField(_("Address"), max_length=100, default=_("address"))
# credits in minutes
balance = models.IntegerField(default=0)
>>>>>>> 2db144ba2c6c34a8f17f795a1186a524059b1aa6
def balance_hours(self):
if self.balance % 60 == 0:
return self.balance/60
return self.balance/60.0
<<<<<<< HEAD
description = models.TextField(_("Opis"), max_length=300,
blank=True)
land_line = models.CharField(_("Stacionarni telefon"), max_length=20)<|fim▁hole|>
email_updates = models.BooleanField(_(u"Želim prejemati novice Časovne banke"),
=======
description = models.TextField(_("Personal address"), max_length=300,
blank=True)
land_line = models.CharField(_("Land line"), max_length=20)
mobile_tlf = models.CharField(_("Mobile phone"), max_length=20)
email_updates = models.BooleanField(_("Receive email updates"),
>>>>>>> 2db144ba2c6c34a8f17f795a1186a524059b1aa6
default=True)
# Saving the user language allows sending emails to him in his desired
# language (among other things)
<<<<<<< HEAD
lang_code = models.CharField(_("Jezik"), max_length=10, default='')
class Meta:
verbose_name = _("user")
verbose_name_plural = _("users")
=======
lang_code = models.CharField(_("Language Code"), max_length=10, default='')
class Meta:
verbose_name = _("User")
verbose_name_plural = _("Users")
>>>>>>> 2db144ba2c6c34a8f17f795a1186a524059b1aa6
def __unicode__(self):
return self.username
# Use UserManager to get the create_user method, etc.
objects = UserManager()
def __eq__(self, value):
return value and self.id == value.id or False
def transfers_pending(self):
'''
Transfers from this user which are not in a final state
'''
from serv.models import Transfer
return Transfer.objects.filter(Q(credits_payee=self) \
| Q(credits_payee=self)).filter(status__in=['r', 'd'])
def karma(self):
'''
Average of the user's transfer scores
'''
karma = self.transfers_received.aggregate(Avg('rating_score'))
if karma['rating_score__avg']:
return int(karma['rating_score__avg'])
else:
return 0<|fim▁end|>
|
mobile_tlf = models.CharField(_("Mobilni telefon"), max_length=20)
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.utils.translation import ugettext as _
class Category(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
order = models.IntegerField(default=1)
class Meta:
verbose_name = _('Category')
verbose_name_plural = _('Categories')
ordering = ['order']
def __unicode__(self):
return self.title
class WMSServer(models.Model):
title = models.CharField(max_length=50)
url = models.URLField()
attribution = models.CharField(max_length=50)
class Meta:
verbose_name = _('WMSServer')
verbose_name_plural = _('WMSServers')
def __unicode__(self):
return self.title
class Layer(models.Model):
WMS_FORMAT_OPTIONS = (<|fim▁hole|> ('image/png', 'image/png'),
('image/jpeg', 'image/jpeg'),
)
title = models.CharField(max_length=100)
category = models.ForeignKey(Category)
visible = models.BooleanField()
category_order = models.IntegerField(default=1)
map_order = models.IntegerField(default=1)
wms_server = models.ForeignKey(WMSServer)
wms_layers = models.CharField(max_length=100)
wms_styles = models.CharField(max_length=100, null=True, blank=True)
wms_format = models.CharField(max_length=10, choices=WMS_FORMAT_OPTIONS, default='image/png')
wms_transparent = models.BooleanField(default=True)
class Meta:
verbose_name = _('Layer')
verbose_name_plural = _('Layers')
ordering = ['category_order']
def __unicode__(self):
return self.title<|fim▁end|>
| |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var express= require('express'),
path= require('path'),
bodyParser = require('body-parser'),
routes = require('./server/config/routes'),
app= express();
app.use(bodyParser.json());
routes(app);
app.use('/', express.static(path.join(__dirname, 'client')));<|fim▁hole|><|fim▁end|>
|
app.listen(8080);
console.log('listening on 8080');
|
<|file_name|>multiclass_classification.py<|end_file_name|><|fim▁begin|>import os
import numpy as np
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
import shap
import mlflow
# prepare training data
X, y = load_iris(return_X_y=True, as_frame=True)
<|fim▁hole|># train a model
model = RandomForestClassifier()
model.fit(X, y)
# log an explanation
with mlflow.start_run() as run:
mlflow.shap.log_explanation(model.predict_proba, X)
# list artifacts
client = mlflow.tracking.MlflowClient()
artifact_path = "model_explanations_shap"
artifacts = [x.path for x in client.list_artifacts(run.info.run_id, artifact_path)]
print("# artifacts:")
print(artifacts)
# load back the logged explanation
dst_path = client.download_artifacts(run.info.run_id, artifact_path)
base_values = np.load(os.path.join(dst_path, "base_values.npy"))
shap_values = np.load(os.path.join(dst_path, "shap_values.npy"))
# show a force plot
shap.force_plot(base_values[0], shap_values[0, 0, :], X.iloc[0, :], matplotlib=True)<|fim▁end|>
| |
<|file_name|>test_instantialize_classes_filetypes.py<|end_file_name|><|fim▁begin|>import f311.filetypes as ft
def test_DataFile():
_ = ft.DataFile()
print(_)
def test_FileFits():
_ = ft.FileFits()<|fim▁hole|>
def test_FilePy():
_ = ft.FilePy()
def test_FileSQLiteDB():
_ = ft.FileSQLiteDB()
def test_FileSpectrum():
_ = ft.FileSpectrum()
print(_)
def test_FileSpectrumFits():
_ = ft.FileSpectrumFits()
print(_)
def test_FileSpectrumXY():
_ = ft.FileSpectrumXY()
print(_)
def test_Spectrum():
_ = ft.Spectrum()
print(_)<|fim▁end|>
|
print(_)
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.base import RedirectView
from preferences import preferences
from jmbo.urls import v1_api
from foundry.models import Page
from foundry import views, forms
from foundry.api import ListingResource, LinkResource, NavbarResource, \
MenuResource, PageResource, BlogPostResource
admin.autodiscover()
try:
import object_tools
object_tools.autodiscover()
except ImportError:
pass
v1_api.register(ListingResource())
v1_api.register(LinkResource())
v1_api.register(NavbarResource())
v1_api.register(MenuResource())
v1_api.register(PageResource())
v1_api.register(BlogPostResource())
urlpatterns = patterns('',
# Pre-empt url call for comment post
url(
r'^comments/post/$',
'foundry.views.post_comment',
{},
name='comments-post-comment'
),
(r'^favicon\.ico$', RedirectView.as_view(url='/static/images/favicon.ico', permanent=False)),
(r'^googlesearch/', include('googlesearch.urls')),
(r'^jmbo/', include('jmbo.urls')),
(r'^comments/', include('django.contrib.comments.urls')),
(r'^likes/', include('likes.urls')),
(r'^object-tools/', include(object_tools.tools.urls)),
(r'^ckeditor/', include('ckeditor.urls')),
(r'^contact/', include('contact.urls')),
(r'^post/', include('post.urls')),
(r'^simple-autocomplete/', include('simple_autocomplete.urls')),
(r'^jmbo-analytics/', include('jmbo_analytics.urls')),
url(r'social-auth', include('social_auth.urls')),
(r'^admin/', include(admin.site.urls)),
<|fim▁hole|> TemplateView.as_view(template_name='base.html'),
name='home'
),
url(
r'^logo/$',
TemplateView.as_view(template_name='foundry/logo.html'),
name='logo'
),
url(
r'^header/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/header.html'),
name='header'
),
url(
r'^footer/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/footer.html'),
name='footer'
),
# Join, login, password reset
url(
r'^join/$',
'foundry.views.join',
{},
name='join',
),
url(
r'^join-finish/$',
'foundry.views.join_finish',
{},
name='join-finish',
),
(r'^auth/', include('django.contrib.auth.urls')),
url(
r'^login/$',
'django.contrib.auth.views.login',
{'authentication_form': forms.LoginForm},
name='login',
),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page':'/'},
name='logout',
),
# Password reset with custom form
url(
r'^password_reset/$',
'django.contrib.auth.views.password_reset',
{
'password_reset_form': forms.PasswordResetForm,
},
name='password_reset',
),
# Pages defined in preferences
url(
r'^about-us/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.about_us,
title=_("About us")
),
name='about-us'
),
url(
r'^terms-and-conditions/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.terms_and_conditions,
title=_("Terms and conditions")
),
name='terms-and-conditions'
),
url(
r'^privacy-policy/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.privacy_policy,
title=_("Privacy policy")
),
name='privacy-policy'
),
# Age gateway
url(
r'^age-gateway/$',
'foundry.views.age_gateway',
{},
name='age-gateway',
),
# Listing
url(
r'^listing/(?P<slug>[\w-]+)/$',
'foundry.views.listing_detail',
{},
name='listing-detail'
),
# Listing feed
url(
r'^listing/(?P<slug>[\w-]+)/feed/$',
'foundry.feeds.listing_feed',
{},
name='listing-feed'
),
# Edit profile
url(r'^edit-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/edit_profile.html'
)
),
name='edit-profile'
),
# Complete profile
url(r'^complete-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/complete_profile.html'
)
),
name='complete-profile'
),
# Page detail
url(
r'^page/(?P<slug>[\w-]+)/$',
'foundry.views.page_detail',
{},
name='page-detail'
),
# Lorem ipsum
url(
r'^lorem-ipsum/$',
TemplateView.as_view(template_name='foundry/lorem_ipsum.html'),
name='lorem-ipsum'
),
# Search
url(
r'^search/$',
'foundry.views.search',
{},
name='search'
),
# Search results
url(
r'^search-results/$',
'foundry.views.search_results',
{},
name='search-results'
),
# Comment reply form in case of no javascript
url(
r'^comment-reply-form/$',
'foundry.views.comment_reply_form',
{},
name='comment-reply-form'
),
# Report comment
url(
r'^report-comment/(?P<comment_id>\d+)/$',
'foundry.views.report_comment',
{},
name='report-comment'
),
# Chatroom detail
url(
r'^chatroom/(?P<slug>[\w-]+)/$',
'foundry.views.chatroom_detail',
{},
name='chatroom-detail'
),
# Create blogpost
url(
r'^create-blogpost/$',
'foundry.views.create_blogpost',
{},
name='create-blogpost',
),
# Blogpost list
url(
r'^blogposts/$',
views.BlogPostObjectList.as_view(),
{'limit': 300},
name='blogpost_object_list'
),
# Blogpost detail
url(
r'^blogpost/(?P<slug>[\w-]+)/$',
views.BlogPostObjectDetail.as_view(),
{},
name='blogpost_object_detail'
),
# Member notifications
url(
r'^member-notifications/$',
login_required(views.member_notifications),
{},
name='member-notifications'
),
# User detail page
url(
r'^users/(?P<username>[=@\.\w-]+)/$',
'foundry.views.user_detail',
{},
name='user-detail'
),
# Coming soon
url(
r'^coming-soon/$',
TemplateView.as_view(template_name='foundry/coming_soon.html'),
name='coming-soon'
),
# Load new comments
url(
r'^fetch-new-comments-ajax/(?P<content_type_id>\d+)/(?P<oid>\d+)/(?P<last_comment_id>\d+)/$',
'foundry.views.fetch_new_comments_ajax',
{},
name='fetch-new-comments-ajax'
),
# Test views
url(
r'^test-plain-response/$',
'foundry.views.test_plain_response',
{},
name='test-plain-response'
),
url(
r'^test-redirect/$',
'foundry.views.test_redirect',
{},
name='test-redirect'
),
url(
r'^pages/$',
DetailView.as_view(),
{'queryset':Page.permitted.all().order_by('title')},
'page-list'
),
# Member detail page
url(
r'^members/(?P<username>[\w-]+)/$',
'foundry.views.member_detail',
{},
name='member-detail'
),
# Admin
url(
r'^admin-row-create-ajax/$',
'foundry.admin_views.row_create_ajax',
{},
name='admin-row-create-ajax',
),
url(
r'^admin-column-create-ajax/$',
'foundry.admin_views.column_create_ajax',
{},
name='admin-column-create-ajax',
),
url(
r'^admin-tile-create-ajax/$',
'foundry.admin_views.tile_create_ajax',
{},
name='admin-tile-create-ajax',
),
url(
r'^admin-row-edit-ajax/$',
'foundry.admin_views.row_edit_ajax',
{},
name='admin-row-edit-ajax',
),
url(
r'^admin-column-edit-ajax/$',
'foundry.admin_views.column_edit_ajax',
{},
name='admin-column-edit-ajax',
),
url(
r'^admin-tile-edit-ajax/$',
'foundry.admin_views.tile_edit_ajax',
{},
name='admin-tile-edit-ajax',
),
url(
r'^admin-row-delete-ajax/$',
'foundry.admin_views.row_delete_ajax',
{},
name='admin-row-delete-ajax',
),
url(
r'^admin-column-delete-ajax/$',
'foundry.admin_views.column_delete_ajax',
{},
name='admin-column-delete-ajax',
),
url(
r'^admin-tile-delete-ajax/$',
'foundry.admin_views.tile_delete_ajax',
{},
name='admin-tile-delete-ajax',
),
url(
r'^admin-persist-sort-ajax/$',
'foundry.admin_views.persist_sort_ajax',
{},
name='admin-persist-sort-ajax',
),
url(
r'^admin-remove-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.remove_comment',
{},
name='admin-remove-comment'
),
url(
r'^admin-allow-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.allow_comment',
{},
name='admin-allow-comment'
),
)
# Praekelt maintained Jmbo packages which are optional
if "banner" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^banner/', include('banner.urls')))
if "chart" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^chart/', include('chart.urls')))
if "competition" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^competition/', include('competition.urls')))
if "downloads" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^downloads/', include('downloads.urls')))
if "friends" in settings.INSTALLED_APPS:
# Friends has a fancy member detail page and needs to resolve first
urlpatterns.insert(1, url(r'^friends/', include('friends.urls')))
if "gallery" in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^gallery/', include('gallery.urls')),
(r'^admin/', include('gallery.admin_urls')),
)
if "jmbo_calendar" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^calendar/', include('jmbo_calendar.urls')))
if "jmbo_twitter" in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^jmbo_twitter', include('jmbo_twitter.urls')),
(r'^admin/', include('jmbo_twitter.admin_urls')),
)
if "music" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^music/', include('music.urls')))
if "poll" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^poll/', include('poll.urls')))
if "show" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^show/', include('show.urls')))
if "video" in settings.INSTALLED_APPS:
urlpatterns += patterns('', (r'^video/', include('video.urls')))
if "jmbo_sitemap" in settings.INSTALLED_APPS:
from jmbo_sitemap import sitemaps
from jmbo_sitemap.views import sitemap, SitemapHTMLView
urlpatterns += patterns(
'',
# Unidentified issue with Jmbo URLPatternItem class means
# (r'^', include('jmbo_sitemap.urls')) causes error. Use a workaround.
url(
r'^sitemap\.xml$',
sitemap,
{'sitemaps': sitemaps},
name='sitemap'
),
url(
r'^sitemap/$',
SitemapHTMLView.as_view(),
name='html-sitemap'
),
)
# Add api last because all resources are registered at this point
urlpatterns += patterns('', (r'^api/', include(v1_api.urls)))
urlpatterns += staticfiles_urlpatterns()
# Flatpages must be last
urlpatterns += patterns('', ('r^/', include('django.contrib.flatpages.urls')))
handler500 = 'foundry.views.server_error'
if settings.DEBUG:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
)<|fim▁end|>
|
url(
r'^$',
|
<|file_name|>chime.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "chime")]
extern crate rusoto_chime;
extern crate rusoto_core;
use rusoto_chime::{Chime, ChimeClient, ListAccountsRequest};
use rusoto_core::Region;<|fim▁hole|>async fn should_list_environments() {
let client = ChimeClient::new(Region::UsEast1);
let request = ListAccountsRequest::default();
let result = client.list_accounts(request).await.unwrap();
println!("{:#?}", result);
}<|fim▁end|>
|
#[tokio::test]
|
<|file_name|>example.go<|end_file_name|><|fim▁begin|>package igpay<|fim▁hole|>import (
"regexp"
"strings"
)
var vowel = regexp.MustCompile(`^([aeiou]|y[^aeiou]|xr)[a-z]*`)
var cons = regexp.MustCompile(`^([^aeiou]?qu|[^aeiou]+)([a-z]*)`)
func PigLatin(s string) string {
sw := strings.Fields(s)
for i, w := range sw {
l := strings.ToLower(w)
if vowel.MatchString(l) {
sw[i] = l + "ay"
} else if x := cons.FindStringSubmatchIndex(l); x != nil {
sw[i] = l[x[3]:] + l[:x[3]] + "ay"
}
}
return strings.Join(sw, " ")
}<|fim▁end|>
| |
<|file_name|>CbfModel.js<|end_file_name|><|fim▁begin|>/*
*
*/
Ext.define("core.cbf.model.CbfModel",{
extend:"Ext.data.Model",
fields:[
{name:"id",type:"int",srotable:false},
{name:"cbfbm",type:"string",srotable:false},
{name:"cbflx",type:"char",srotable:false},
{name:"cbfmc",type:"string",srotable:false},
{name:"cbfxb",type:"char",srotable:false},
{name:"cbfmz",type:"string",srotable:false},
{name:"cbfzjlx",type:"char",srotable:false},
{name:"cbfzjhm",type:"string",srotable:false},
{name:"cbfdz",type:"string",srotable:false},
{name:"yzbm",type:"string",srotable:false},
{name:"lxdh",type:"string",srotable:false},
{name:"cbfcysl",type:"int",srotable:false},
{name:"cbfdcrq",type:"date",srotable:false},
{name:"cbfdcy",type:"string",srotable:false},
{name:"cbfdcjs",type:"string",srotable:false},
{name:"gsjs",type:"string",srotable:false},
<|fim▁hole|> });<|fim▁end|>
|
{name:"gsjsr",type:"string",srotable:false},
{name:"gsshrq",type:"string",srotable:false}, //date
{name:"gsshr",type:"string",srotable:false}
]
|
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.<|fim▁hole|>//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Fallback cap on the number of services shown in the dependency DAG when no
// other limit is supplied.
export const FALLBACK_DAG_MAX_NUM_SERVICES = 100 as 100;
// Display name used for a trace whose root span is missing.
export const FALLBACK_TRACE_NAME = '<trace-without-root-span>' as '<trace-without-root-span>';
// Fetch life-cycle states. The `as '<literal>'` casts narrow each constant to
// its string-literal type (pre-`as const` idiom) so unions can discriminate.
export const FETCH_DONE = 'FETCH_DONE' as 'FETCH_DONE';
export const FETCH_ERROR = 'FETCH_ERROR' as 'FETCH_ERROR';
export const FETCH_LOADING = 'FETCH_LOADING' as 'FETCH_LOADING';
// Convenience lookup of the fetch states keyed by semantic name.
export const fetchedState = {
  DONE: FETCH_DONE,
  ERROR: FETCH_ERROR,
  LOADING: FETCH_LOADING,
};
|
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>selectors.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput, ToCss};
use selectors::parser::SelectorList;
use style::selector_parser::{SelectorImpl, SelectorParser};
use style::stylesheets::{Origin, Namespaces};
use style_traits::ParseError;<|fim▁hole|> ns.prefixes.insert("svg".into(), (ns!(svg), ()));
let parser = SelectorParser {
stylesheet_origin: Origin::UserAgent,
namespaces: &ns,
};
SelectorList::parse(&parser, input)
}
#[test]
fn test_selectors() {
assert_roundtrip!(parse_selector, "div");
assert_roundtrip!(parse_selector, "svg|circle");
assert_roundtrip!(parse_selector, "p:before", "p::before");
assert_roundtrip!(parse_selector, "[border=\"0\"]:-servo-nonzero-border ~ ::-servo-details-summary");
}<|fim▁end|>
|
fn parse_selector<'i, 't>(input: &mut Parser<'i, 't>) -> Result<SelectorList<SelectorImpl>, ParseError<'i>> {
let mut ns = Namespaces::default();
|
<|file_name|>input.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green ([email protected]) and the
# RMG Team ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import logging
import quantities
import os
from rmgpy import settings
from rmgpy.molecule import Molecule
from rmgpy.quantity import Quantity
from rmgpy.solver.base import TerminationTime, TerminationConversion
from rmgpy.solver.simple import SimpleReactor
from rmgpy.solver.liquid import LiquidReactor
from model import CoreEdgeReactionModel
################################################################################
class InputError(Exception): pass
################################################################################
rmg = None
speciesDict = {}
def database(
    thermoLibraries = None,
    reactionLibraries = None,
    frequenciesLibraries = None,
    seedMechanisms = None,
    kineticsFamilies = 'default',
    kineticsDepositories = 'default',
    kineticsEstimator = 'group additivity',
):
    """
    Record which database components the job should use. Nothing is loaded
    here; the actual database load happens after the whole input file has
    been read.
    """
    def listify(value):
        # A bare string is shorthand for a one-element list of library names.
        return [value] if isinstance(value, str) else value

    thermoLibraries = listify(thermoLibraries)
    reactionLibraries = listify(reactionLibraries)
    seedMechanisms = listify(seedMechanisms)
    frequenciesLibraries = listify(frequenciesLibraries)

    rmg.databaseDirectory = settings['database.directory']
    rmg.thermoLibraries = thermoLibraries or []
    rmg.reactionLibraries = reactionLibraries or []
    rmg.seedMechanisms = seedMechanisms or []
    rmg.statmechLibraries = frequenciesLibraries or []
    rmg.kineticsEstimator = kineticsEstimator

    # Depositories: 'default' -> training only; 'all' -> no filtering (None);
    # otherwise an explicit list of depository names is required.
    if kineticsDepositories == 'default':
        rmg.kineticsDepositories = ['training']
    elif kineticsDepositories == 'all':
        rmg.kineticsDepositories = None
    elif isinstance(kineticsDepositories, list):
        rmg.kineticsDepositories = kineticsDepositories
    else:
        raise InputError("kineticsDepositories should be either 'default', 'all', or a list of names eg. ['training','PrIMe'].")

    # Families: the three keywords pass through untouched; anything else must
    # be an explicit list of family names (optionally '!'-negated).
    if kineticsFamilies in ('default', 'all', 'none') or isinstance(kineticsFamilies, list):
        rmg.kineticsFamilies = kineticsFamilies
    else:
        raise InputError("kineticsFamilies should be either 'default', 'all', 'none', or a list of names eg. ['H_Abstraction','R_Recombination'] or ['!Intra_Disproportionation'].")
def species(label, structure, reactive=True):
    """
    Declare one initial species from the input file and register it with the
    reaction model. Raises InputError when the declaration duplicates a
    species that was already defined.
    """
    kind = 'reactive' if reactive else 'nonreactive'
    logging.debug('Found {0} species "{1}" ({2})'.format(kind, label, structure.toSMILES()))
    spec, isNew = rmg.reactionModel.makeNewSpecies(structure, label=label, reactive=reactive)
    if not isNew:
        # Two species() entries resolved to the same species.
        raise InputError("Species {0} is a duplicate of {1}. Species in input file must be unique".format(label,spec.label))
    speciesDict[label] = spec
    rmg.initialSpecies.append(spec)
def SMARTS(string):
    """Build a Molecule from a SMARTS pattern string (input-file helper)."""
    return Molecule().fromSMARTS(string)
def SMILES(string):
    """Build a Molecule from a SMILES string (input-file helper)."""
    return Molecule().fromSMILES(string)
def InChI(string):
    """Build a Molecule from an InChI identifier (input-file helper)."""
    return Molecule().fromInChI(string)
def adjacencyList(string):
    """Build a Molecule from an RMG adjacency list (input-file helper)."""
    return Molecule().fromAdjacencyList(string)
# Reaction systems
def simpleReactor(temperature,
                  pressure,
                  initialMoleFractions,
                  terminationConversion=None,
                  terminationTime=None,
                  sensitivity=None,
                  sensitivityThreshold=1e-3
                  ):
    """
    Declare a gas-phase reaction system at fixed `temperature` and `pressure`
    with the given initial mole fractions, and append it to the job's list of
    reaction systems. At least one termination criterion (conversion or time)
    must be supplied; `sensitivity` names species for sensitivity analysis.
    """
    logging.debug('Found SimpleReactor reaction system')

    for value in initialMoleFractions.values():
        if value < 0:
            raise InputError('Initial mole fractions cannot be negative.')

    if sum(initialMoleFractions.values()) != 1:
        logging.warning('Initial mole fractions do not sum to one; renormalizing.')
        # BUGFIX: compute the total once. The old code divided by
        # sum(initialMoleFractions.values()) *inside* the loop, so the
        # denominator shrank as entries were rescaled and the result did not
        # sum to one.
        total = sum(initialMoleFractions.values())
        for spec in initialMoleFractions:
            initialMoleFractions[spec] /= total

    T = Quantity(temperature)
    P = Quantity(pressure)

    termination = []
    if terminationConversion is not None:
        for spec, conv in terminationConversion.iteritems():
            termination.append(TerminationConversion(speciesDict[spec], conv))
    if terminationTime is not None:
        termination.append(TerminationTime(Quantity(terminationTime)))
    if len(termination) == 0:
        # This system has not been appended yet, so it is number len+1
        # (the previous "+2" over-counted by one).
        raise InputError('No termination conditions specified for reaction system #{0}.'.format(len(rmg.reactionSystems)+1))

    sensitiveSpecies = []
    if sensitivity:
        for spec in sensitivity:
            sensitiveSpecies.append(speciesDict[spec])
    system = SimpleReactor(T, P, initialMoleFractions, termination, sensitiveSpecies, sensitivityThreshold)
    rmg.reactionSystems.append(system)
# Reaction systems
def liquidReactor(temperature,
initialConcentrations,
terminationConversion=None,
terminationTime=None,
sensitivity=None,
sensitivityThreshold=1e-3):
logging.debug('Found LiquidReactor reaction system')
T = Quantity(temperature)
for spec,conc in initialConcentrations.iteritems():
concentration = Quantity(conc)
# check the dimensions are ok
# convert to mol/m^3 (or something numerically nice? or must it be SI)
initialConcentrations[spec] = concentration.value_si<|fim▁hole|> termination = []
if terminationConversion is not None:
for spec, conv in terminationConversion.iteritems():
termination.append(TerminationConversion(speciesDict[spec], conv))
if terminationTime is not None:
termination.append(TerminationTime(Quantity(terminationTime)))
if len(termination) == 0:
raise InputError('No termination conditions specified for reaction system #{0}.'.format(len(rmg.reactionSystems)+2))
sensitiveSpecies = []
if sensitivity:
for spec in sensitivity:
sensitiveSpecies.append(speciesDict[spec])
system = LiquidReactor(T, initialConcentrations, termination, sensitiveSpecies, sensitivityThreshold)
rmg.reactionSystems.append(system)
def simulator(atol, rtol, sens_atol=1e-6, sens_rtol=1e-4):
    """Store the ODE-solver tolerances (and the sensitivity-equation
    tolerances) on the global RMG job object."""
    rmg.sensitivityRelativeTolerance = sens_rtol
    rmg.sensitivityAbsoluteTolerance = sens_atol
    rmg.relativeTolerance = rtol
    rmg.absoluteTolerance = atol
def solvation(solvent):
    """Record the solvent name for a liquid-phase job; must be a plain string."""
    if isinstance(solvent, str):
        rmg.solvent = solvent
    else:
        raise InputError("solvent should be a string like 'water'")
def model(toleranceMoveToCore=None, toleranceKeepInEdge=0.0, toleranceInterruptSimulation=1.0, maximumEdgeSpecies=None):
    """
    Configure model generation. `toleranceMoveToCore` is required and must not
    exceed `toleranceInterruptSimulation`; the remaining parameters are
    optional and control edge pruning.
    """
    # Validate before touching the job object so a bad input leaves it untouched.
    if toleranceMoveToCore is None:
        raise InputError("You must provide a toleranceMoveToCore value. It should be less than or equal to toleranceInterruptSimulation which is currently {0}".format(toleranceInterruptSimulation))
    if toleranceMoveToCore > toleranceInterruptSimulation:
        raise InputError("toleranceMoveToCore must be less than or equal to toleranceInterruptSimulation, which is currently {0}".format(toleranceInterruptSimulation))
    rmg.maximumEdgeSpecies = maximumEdgeSpecies
    rmg.fluxToleranceInterrupt = toleranceInterruptSimulation
    rmg.fluxToleranceMoveToCore = toleranceMoveToCore
    rmg.fluxToleranceKeepInEdge = toleranceKeepInEdge
def quantumMechanics(
    software,
    method,
    fileStore = None,
    scratchDirectory = None,
    onlyCyclics = False,
    maxRadicalNumber = 0,
):
    """
    Enable quantum-mechanical calculations for the job by attaching a
    configured QMCalculator to the global RMG object.
    """
    from rmgpy.qm.main import QMCalculator
    qm = QMCalculator()
    qmSettings = qm.settings  # local alias; avoids repeating the long path
    qmSettings.software = software
    qmSettings.method = method
    qmSettings.fileStore = fileStore
    qmSettings.scratchDirectory = scratchDirectory
    qmSettings.onlyCyclics = onlyCyclics
    qmSettings.maxRadicalNumber = maxRadicalNumber
    rmg.quantumMechanics = qm
def pressureDependence(
    method,
    temperatures,
    pressures,
    maximumGrainSize = 0.0,
    minimumNumberOfGrains = 0,
    interpolation = None,
    maximumAtoms=None,
):
    """
    Enable pressure-dependent kinetics for the job.

    `temperatures` and `pressures` are 4-tuples (min, max, units, count) used
    to build the evaluation grids; `method` and `interpolation` select the
    master-equation reduction and the fitted rate representation.
    """
    from rmgpy.cantherm.pdep import PressureDependenceJob
    # Setting the pressureDependence attribute to non-None enables pressure dependence
    rmg.pressureDependence = PressureDependenceJob(network=None)
    # Process method
    rmg.pressureDependence.method = method
    # Process interpolation model
    rmg.pressureDependence.interpolationModel = interpolation
    # Process temperatures
    Tmin, Tmax, Tunits, Tcount = temperatures
    rmg.pressureDependence.Tmin = Quantity(Tmin, Tunits)
    rmg.pressureDependence.Tmax = Quantity(Tmax, Tunits)
    rmg.pressureDependence.Tcount = Tcount
    rmg.pressureDependence.generateTemperatureList()
    # Process pressures
    Pmin, Pmax, Punits, Pcount = pressures
    rmg.pressureDependence.Pmin = Quantity(Pmin, Punits)
    rmg.pressureDependence.Pmax = Quantity(Pmax, Punits)
    rmg.pressureDependence.Pcount = Pcount
    rmg.pressureDependence.generatePressureList()
    # Process grain size and count
    rmg.pressureDependence.maximumGrainSize = Quantity(maximumGrainSize)
    rmg.pressureDependence.minimumGrainCount = minimumNumberOfGrains
    # Process maximum atoms
    rmg.pressureDependence.maximumAtoms = maximumAtoms
    # RMG always treats the J-rotor and K-rotor as active and runs the job
    # in RMG mode (as opposed to standalone CanTherm mode).
    rmg.pressureDependence.activeJRotor = True
    rmg.pressureDependence.activeKRotor = True
    rmg.pressureDependence.rmgmode = True
def options(units='si', saveRestartPeriod=None, drawMolecules=False, generatePlots=False, saveSimulationProfiles=False, verboseComments=False, saveEdgeSpecies=False):
    """Store miscellaneous job-wide options on the global RMG object."""
    # A falsy period disables periodic restart-file saving entirely.
    if saveRestartPeriod:
        rmg.saveRestartPeriod = Quantity(saveRestartPeriod)
    else:
        rmg.saveRestartPeriod = None
    rmg.units = units
    rmg.saveEdgeSpecies = saveEdgeSpecies
    rmg.verboseComments = verboseComments
    rmg.saveSimulationProfiles = saveSimulationProfiles
    rmg.generatePlots = generatePlots
    rmg.drawMolecules = drawMolecules
def generatedSpeciesConstraints(**kwargs):
    """
    Record constraints that limit which species RMG is allowed to generate
    (element counts, radical electrons, an explicit allow-list, ...).
    Unknown keywords raise InputError.
    """
    validConstraints = frozenset([
        'allowed',
        'maximumCarbonAtoms',
        'maximumHydrogenAtoms',
        'maximumOxygenAtoms',
        'maximumNitrogenAtoms',
        'maximumSiliconAtoms',
        'maximumSulfurAtoms',
        'maximumHeavyAtoms',
        'maximumRadicalElectrons',
    ])
    for key, value in kwargs.items():
        if key in validConstraints:
            rmg.speciesConstraints[key] = value
        else:
            raise InputError('Invalid generated species constraint {0!r}.'.format(key))
################################################################################
def readInputFile(path, rmg0):
    """
    Read an RMG input file at `path` on disk into the :class:`RMG` object
    `rmg`.

    The file is executed as Python (Python-2 `exec` statement) against a
    restricted namespace: `__builtins__` is disabled and only the DSL
    functions below (database, species, simpleReactor, ...) are exposed.
    Those functions populate the module-global `rmg`/`speciesDict`.
    """
    global rmg, speciesDict
    full_path = os.path.abspath(os.path.expandvars(path))
    try:
        f = open(full_path)
    except IOError, e:
        logging.error('The input file "{0}" could not be opened.'.format(full_path))
        logging.info('Check that the file exists and that you have read access.')
        raise e
    logging.info('Reading input file "{0}"...'.format(full_path))
    # Reset the job state so re-reading an input file starts clean.
    rmg = rmg0
    rmg.reactionModel = CoreEdgeReactionModel()
    rmg.initialSpecies = []
    rmg.reactionSystems = []
    speciesDict = {}
    global_context = { '__builtins__': None }
    local_context = {
        '__builtins__': None,
        'True': True,
        'False': False,
        'database': database,
        'species': species,
        'SMARTS': SMARTS,
        'SMILES': SMILES,
        'InChI': InChI,
        'adjacencyList': adjacencyList,
        'simpleReactor': simpleReactor,
        'liquidReactor': liquidReactor,
        'simulator': simulator,
        'solvation': solvation,
        'model': model,
        'quantumMechanics': quantumMechanics,
        'pressureDependence': pressureDependence,
        'options': options,
        'generatedSpeciesConstraints': generatedSpeciesConstraints,
    }
    try:
        exec f in global_context, local_context
    except (NameError, TypeError, SyntaxError), e:
        logging.error('The input file "{0}" was invalid:'.format(full_path))
        logging.exception(e)
        raise
    finally:
        f.close()
    # convert keys from species names into species objects.
    for reactionSystem in rmg.reactionSystems:
        reactionSystem.convertInitialKeysToSpeciesObjects(speciesDict)
    logging.info('')
################################################################################
def readThermoInputFile(path, rmg0):
    """
    Read an thermo estimation input file at `path` on disk into the :class:`RMG` object
    `rmg`.

    Like :func:`readInputFile`, but exposes only the subset of the input DSL
    relevant to thermo estimation (no reactors, model, or pressure-dependence
    blocks).
    """
    global rmg, speciesDict
    full_path = os.path.abspath(os.path.expandvars(path))
    try:
        f = open(full_path)
    except IOError, e:
        logging.error('The input file "{0}" could not be opened.'.format(full_path))
        logging.info('Check that the file exists and that you have read access.')
        raise e
    logging.info('Reading input file "{0}"...'.format(full_path))
    # Reset the job state so re-reading an input file starts clean.
    rmg = rmg0
    rmg.reactionModel = CoreEdgeReactionModel()
    rmg.initialSpecies = []
    rmg.reactionSystems = []
    speciesDict = {}
    global_context = { '__builtins__': None }
    local_context = {
        '__builtins__': None,
        'True': True,
        'False': False,
        'database': database,
        'species': species,
        'SMARTS': SMARTS,
        'SMILES': SMILES,
        'InChI': InChI,
        'solvation': solvation,
        'adjacencyList': adjacencyList,
        'quantumMechanics': quantumMechanics,
    }
    try:
        exec f in global_context, local_context
    except (NameError, TypeError, SyntaxError), e:
        logging.error('The input file "{0}" was invalid:'.format(full_path))
        logging.exception(e)
        raise
    finally:
        f.close()
    logging.info('')
################################################################################
def saveInputFile(path, rmg):
    """
    Save an RMG input file at `path` on disk from the :class:`RMG` object
    `rmg`.

    Writes the same DSL blocks that :func:`readInputFile` understands, in
    the order: database, species, reactors, solvation, simulator, model,
    pressureDependence, quantumMechanics, options.
    """
    f = open(path, 'w')
    # Databases
    f.write('database(\n')
    #f.write('    "{0}",\n'.format(rmg.databaseDirectory))
    f.write('    thermoLibraries = {0!r},\n'.format(rmg.thermoLibraries))
    f.write('    reactionLibraries = {0!r},\n'.format(rmg.reactionLibraries))
    f.write('    seedMechanisms = {0!r},\n'.format(rmg.seedMechanisms))
    f.write('    kineticsDepositories = {0!r},\n'.format(rmg.kineticsDepositories))
    f.write('    kineticsFamilies = {0!r},\n'.format(rmg.kineticsFamilies))
    f.write('    kineticsEstimator = {0!r},\n'.format(rmg.kineticsEstimator))
    f.write(')\n\n')
    # Species
    for species in rmg.initialSpecies:
        f.write('species(\n')
        f.write('    label = "{0}",\n'.format(species.label))
        f.write('    reactive = {0},\n'.format(species.reactive))
        f.write('    structure = adjacencyList(\n')
        f.write('"""\n')
        f.write(species.molecule[0].toAdjacencyList())
        f.write('"""),\n')
        f.write(')\n\n')
    # Reaction systems
    for system in rmg.reactionSystems:
        # A configured solvent means every reactor is a liquid reactor.
        if rmg.solvent:
            f.write('liquidReactor(\n')
            f.write('    temperature = ({0:g},"{1!s}"),\n'.format(system.T.getValue(),system.T.units))
            f.write('    initialConcentrations={\n')
            for species, conc in system.initialConcentrations.iteritems():
                f.write('        "{0!s}": ({1:g},"{2!s}"),\n'.format(species.label,conc.getValue(),conc.units))
        else:
            f.write('simpleReactor(\n')
            f.write('    temperature = ({0:g},"{1!s}"),\n'.format(system.T.getValue(),system.T.units))
            # Convert the pressure from SI pascal units to bar here
            # Do something more fancy later for converting to user's desired units for both T and P..
            f.write('    pressure = ({0:g},"{1!s}"),\n'.format(system.P.getValue(),system.P.units))
            f.write('    initialMoleFractions={\n')
            for species, molfrac in system.initialMoleFractions.iteritems():
                f.write('        "{0!s}": {1:g},\n'.format(species.label, molfrac))
        f.write('    },\n')
        # Termination criteria
        conversions = ''
        for term in system.termination:
            if isinstance(term, TerminationTime):
                f.write('    terminationTime = ({0:g},"{1!s}"),\n'.format(term.time.getValue(),term.time.units))
            else:
                conversions += '        "{0:s}": {1:g},\n'.format(term.species.label, term.conversion)
        if conversions:
            f.write('    terminationConversion = {\n')
            f.write(conversions)
            f.write('    },\n')
        # Sensitivity analysis
        if system.sensitivity:
            f.write('    sensitivity = {0},\n'.format(system.sensitivity))
            f.write('    sensitivityThreshold = {0},\n'.format(system.sensitivityThreshold))
        f.write(')\n\n')
    if rmg.solvent:
        f.write("solvation(\n    solvent = '{0!s}'\n)\n\n".format(rmg.solvent))
    # Simulator tolerances
    f.write('simulator(\n')
    f.write('    atol = {0:g},\n'.format(rmg.absoluteTolerance))
    f.write('    rtol = {0:g},\n'.format(rmg.relativeTolerance))
    f.write(')\n\n')
    # Model
    f.write('model(\n')
    f.write('    toleranceMoveToCore = {0:g},\n'.format(rmg.fluxToleranceMoveToCore))
    f.write('    toleranceKeepInEdge = {0:g},\n'.format(rmg.fluxToleranceKeepInEdge))
    f.write('    toleranceInterruptSimulation = {0:g},\n'.format(rmg.fluxToleranceInterrupt))
    f.write('    maximumEdgeSpecies = {0:d},\n'.format(rmg.maximumEdgeSpecies))
    f.write(')\n\n')
    # Pressure Dependence
    if rmg.pressureDependence:
        f.write('pressureDependence(\n')
        f.write('    method = "{0!s}",\n'.format(rmg.pressureDependence.method))
        # NOTE(review): pressureDependence() above stores these under
        # `maximumGrainSize`/`minimumGrainCount`/`interpolationModel`; the
        # `grainSize`/`grainCount`/`model` attributes read here presumably
        # come from PressureDependenceJob itself — verify they exist.
        f.write('    maximumGrainSize = ({0:g},"{1!s}"),\n'.format(rmg.pressureDependence.grainSize.getValue(),rmg.pressureDependence.grainSize.units))
        f.write('    minimumNumberOfGrains = {0},\n'.format(rmg.pressureDependence.grainCount))
        f.write('    temperatures = ({0:g},{1:g},"{2!s}",{3:d}),\n'.format(
            rmg.pressureDependence.Tmin.getValue(),
            rmg.pressureDependence.Tmax.getValue(),
            rmg.pressureDependence.Tmax.units,
            rmg.pressureDependence.Tcount,
        ))
        f.write('    pressures = ({0:g},{1:g},"{2!s}",{3:d}),\n'.format(
            rmg.pressureDependence.Pmin.getValue(),
            rmg.pressureDependence.Pmax.getValue(),
            rmg.pressureDependence.Pmax.units,
            rmg.pressureDependence.Pcount,
        ))
        f.write('    interpolation = {0},\n'.format(rmg.pressureDependence.model))
        f.write(')\n\n')
    if rmg.quantumMechanics:
        f.write('quantumMechanics(\n')
        f.write('    software="{0!s}",\n'.format(rmg.quantumMechanics.settings.software))
        f.write('    method="{0!s}",\n'.format(rmg.quantumMechanics.settings.method))
        f.write('    onlyCyclics="{0}",\n'.format(rmg.quantumMechanics.settings.onlyCyclics))
        f.write('    maxRadicalNumber="{0!s}",\n'.format(rmg.quantumMechanics.settings.maxRadicalNumber))
        f.write(')\n\n')
    # Options
    f.write('options(\n')
    f.write('    units = "{0}",\n'.format(rmg.units))
    if rmg.saveRestartPeriod:
        f.write('    saveRestartPeriod = ({0},"{1}"),\n'.format(rmg.saveRestartPeriod.getValue(), rmg.saveRestartPeriod.units))
    else:
        f.write('    saveRestartPeriod = None,\n')
    f.write('    drawMolecules = {0},\n'.format(rmg.drawMolecules))
    f.write('    generatePlots = {0},\n'.format(rmg.generatePlots))
    f.write('    saveSimulationProfiles = {0},\n'.format(rmg.saveSimulationProfiles))
    f.write('    verboseComments = {0},\n'.format(rmg.verboseComments))
    f.write(')\n\n')
f.close()<|fim▁end|>
| |
<|file_name|>functions_6f.js<|end_file_name|><|fim▁begin|>var searchData=
[<|fim▁hole|>];<|fim▁end|>
|
['out',['Out',['../classNumericalSHOCK.html#a74f7811f85ca6d4e7192cc143926f3b4',1,'NumericalSHOCK']]]
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp'),
plumber = require('gulp-plumber'),
browserify = require('gulp-browserify'),
concat = require('gulp-concat'),
gulpif = require('gulp-if'),
uglify = require('gulp-uglify'),
jshint = require('gulp-jshint'),
stylish = require('jshint-stylish'),
sequence = require('run-sequence'),
less = require('gulp-less'),
zip = require('gulp-zip'),
rev = require('gulp-rev-append'),<|fim▁hole|> gutil = require('gulp-util');
var production = gutil.env.type === "production";
var game_name = gutil.env.name || 'fp'
// Source/destination path table, parameterized by the game name
// (--name flag; defaults to 'fp').
var paths = {
    source: {
        canvas_js: './app/js/' + game_name + '/canvas.js',
        web_js: './app/js/' + game_name + '/web.js',
        canvas_css: './app/less/' + game_name + '/canvas.less',
        web_css: './app/less/' + game_name + '/web.less',
        baseJsDir: './app/js/**',
        js: './app/js/**/*.js',
        css: './app/less/**/*.less',
        libs: [
            './bower_components/phaser/build/phaser.js'
        ]
    },
    dest: {
        base: './public/' + game_name + '/',
        html: './public/' + game_name + '/index.html',
        js: './public/' + game_name + '/js',
        css: './public/' + game_name + '/css'
    }
};
// NOTE: in gulp 3 a task signals asynchronous completion by returning its
// stream; without the `return` gulp considers the task done immediately, so
// dependent tasks and process exit can race the actual file writes. Every
// stream-producing task below therefore returns its pipeline.

// Append cache-busting ?rev= query strings to asset references in index.html.
gulp.task('rev', function() {
    return gulp.src(paths.dest.html)
        .pipe(rev())
        .pipe(gulp.dest(paths.dest.base));
});

// Minify third-party libraries and copy them into the public js folder.
gulp.task('copy_libs', function () {
    return gulp.src(paths.source.libs)
        .pipe(uglify({outSourceMaps: false}))
        .pipe(gulp.dest(paths.dest.js));
});

// Bundle the canvas entry point with browserify (uglified for production).
gulp.task('canvas_js', function() {
    return gulp.src(paths.source.canvas_js)
        .pipe(plumber())
        .pipe(browserify())
        .pipe(concat('canvas.js'))
        .pipe(gulpif(production, uglify()))
        .pipe(gulp.dest(paths.dest.js));
});

// Bundle the web entry point with browserify (uglified for production).
gulp.task('web_js', function() {
    return gulp.src(paths.source.web_js)
        .pipe(plumber())
        .pipe(browserify())
        .pipe(concat('web.js'))
        .pipe(gulpif(production, uglify()))
        .pipe(gulp.dest(paths.dest.js));
});

// Compile and compress the canvas LESS bundle.
gulp.task('canvas_css', function() {
    return gulp.src(paths.source.canvas_css)
        .pipe(plumber())
        .pipe(less({ compress: true }))
        .pipe(gulp.dest(paths.dest.css));
});

// Compile and compress the web LESS bundle.
gulp.task('web_css', function() {
    return gulp.src(paths.source.web_css)
        .pipe(plumber())
        .pipe(less({ compress: true }))
        .pipe(gulp.dest(paths.dest.css));
});

// Run JSHint over all app sources with the stylish reporter.
gulp.task('lint', function() {
    return gulp.src(paths.source.js)
        .pipe(jshint())
        .pipe(jshint.reporter(stylish));
});

// Rebuild bundles (and re-lint) whenever JS or LESS sources change.
gulp.task('watch', function() {
    gulp.watch(paths.source.baseJsDir, function() {
        sequence('canvas_js', 'web_js', 'lint')
    });
    gulp.watch(paths.source.css, function() {
        sequence('canvas_css', 'web_css')
    })
});

// Package the built game into a distributable zip archive.
gulp.task('zip', function () {
    return gulp.src([
        'public/' + game_name + '/**/*'
    ])
    .pipe(zip(game_name +'_dist.zip'))
    .pipe(gulp.dest('./dist'))
});
gulp.task('build', [
'canvas_js',
'web_js',
'canvas_css',
'web_css',
'rev'
]);<|fim▁end|>
| |
<|file_name|>default.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
/// Expands `#[deriving(Default)]` on `in_items`: generates an impl of
/// `std::default::Default` whose static `default()` constructor builds each
/// field via that field type's own `Default::default()` (delegated to
/// `default_substructure`).
pub fn expand_deriving_default(cx: &ExtCtxt,
                               span: Span,
                               mitem: @MetaItem,
                               in_items: ~[@Item])
    -> ~[@Item] {
    let trait_def = TraitDef {
        cx: cx, span: span,
        path: Path::new(~["std", "default", "Default"]),
        additional_bounds: ~[],
        generics: LifetimeBounds::empty(),
        methods: ~[
            MethodDef {
                name: "default",
                generics: LifetimeBounds::empty(),
                // `default()` is a static constructor: no self argument.
                explicit_self: None,
                args: ~[],
                ret_ty: Self,
                inline: true,
                const_nonmatching: false,
                combine_substructure: default_substructure
            },
        ]
    };
    trait_def.expand(mitem, in_items)
}
fn default_substructure(cx: &ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
let default_ident = ~[
cx.ident_of("std"),
cx.ident_of("default"),
cx.ident_of("Default"),
cx.ident_of("default")
];
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ~[]);
return match *substr.fields {
StaticStruct(_, ref summary) => {
match *summary {
Unnamed(ref fields) => {
if fields.is_empty() {
cx.expr_ident(span, substr.type_ident)
} else {<|fim▁hole|> }
}
Named(ref fields) => {
let default_fields = fields.map(|&(ident, span)| {
cx.field_imm(span, ident, default_call(span))
});
cx.expr_struct_ident(span, substr.type_ident, default_fields)
}
}
}
StaticEnum(..) => {
cx.span_fatal(span, "`Default` cannot be derived for enums, \
only structs")
}
_ => cx.bug("Non-static method in `deriving(Default)`")
};
}<|fim▁end|>
|
let exprs = fields.map(|sp| default_call(*sp));
cx.expr_call_ident(span, substr.type_ident, exprs)
|
<|file_name|>CloudWaveJNI.java<|end_file_name|><|fim▁begin|>/*******************************************************************************<|fim▁hole|> * Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package CloudWave;
/**
 * JNI bridge to the native "cloudwavejni" library. Implemented as a
 * single-element enum so the JVM guarantees exactly one instance and the
 * native library is loaded exactly once (in the enum constructor).
 */
public enum CloudWaveJNI {
	instance;
	// Library base name passed to System.loadLibrary (no "lib" prefix/extension).
	public static final String CLOUDWAVE_LIB = "cloudwavejni";
	CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);}
	public static CloudWaveJNI getInstance(){return instance;}
	/**
	 * Initializes the native side; throws CloudWaveException when the
	 * native initJNI() call reports an error (negative return code).
	 */
	public void init() throws CloudWaveException{
		int r = initJNI();
		if (r<0) {
			System.err.println("initJNI returned " + r);
			throw new CloudWaveException();
		}
	}
	/** Releases the native-side resources acquired by init(). */
	public void free(){
		freeJNI();
	}
	// Handler invoked for events delivered from native code; guarded by
	// `this` in both the setter and doEvent().
	protected IEventHandler eventHandler;
	public IEventHandler getEventHandler() {
		return eventHandler;
	}
	public void setEventHandler(IEventHandler eh) {
		synchronized(this){ eventHandler = eh;}
	}
	/** Dispatches an event string to the registered handler, if any. */
	public void doEvent(String event){
		synchronized(this) {
			if (eventHandler!=null)
				eventHandler.doEvent(event);
		}
	}
	// Entry point called from native code (by name via JNI); forwards to the
	// singleton's handler.
	protected synchronized static void callback(String event){
		instance.doEvent(event);
	}
	//#: Init/Free
	public native int initJNI();
	protected native int freeJNI();
	//:#
	//#: Log
	protected native int initLog();
	protected native int freeLog();
	protected native int setLogId(String id);
	protected native String getLogId();
	protected native int recordLog(int level, String message);
	protected native int recordLogL(int level, String message, long id);
	//:#
	//#: Metric
	protected native int initMetric();
	protected native int freeMetric();
	protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value);
	protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value);
	protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value);
	protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value);
	protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value);
	protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value);
	//:#
	//#: Events
	protected native int initEvent();
	protected native int freeEvent();
	protected native int postEvent(String event_json);
	protected native long subscribe(String event_id);
	protected native int unsubscribe(long id);
	//:#
}
| |
<|file_name|>dom.ts<|end_file_name|><|fim▁begin|>// (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Injectable, SimpleChange, ElementRef } from '@angular/core';
import {
LoadingController, Loading, ToastController, Toast, AlertController, Alert, Platform, Content, PopoverController,
ModalController,
} from 'ionic-angular';
import { DomSanitizer } from '@angular/platform-browser';
import { TranslateService } from '@ngx-translate/core';
import { CoreTextUtilsProvider } from './text';
import { CoreAppProvider } from '../app';
import { CoreConfigProvider } from '../config';
import { CoreEventsProvider } from '../events';
import { CoreLoggerProvider } from '../logger';
import { CoreUrlUtilsProvider } from './url';
import { CoreFileProvider } from '@providers/file';
import { CoreConstants } from '@core/constants';
import { CoreBSTooltipComponent } from '@components/bs-tooltip/bs-tooltip';
import { Md5 } from 'ts-md5/dist/md5';
import { Subject } from 'rxjs';
/**
 * Interface that defines an extension of the Ionic Alert class, to support multiple listeners.
 * Unlike the stock Alert callbacks, these subjects can be subscribed to any number of times.
 */
export interface CoreAlert extends Alert {
    /**
     * Observable that will notify when the alert is dismissed.
     * Emits the dismiss data and the role of the button/gesture that closed it.
     */
    didDismiss: Subject<{data: any, role: string}>;
    /**
     * Observable that will notify when the alert will be dismissed.
     * Fires before the dismiss animation completes.
     */
    willDismiss: Subject<{data: any, role: string}>;
}
/*
 * "Utils" service with helper functions for UI, DOM elements and HTML code.
 */
@Injectable()
export class CoreDomUtilsProvider {
    // List of input types that support keyboard.
    protected INPUT_SUPPORT_KEYBOARD = ['date', 'datetime', 'datetime-local', 'email', 'month', 'number', 'password',
        'search', 'tel', 'text', 'time', 'url', 'week'];
    // Attribute used to link a DOM element with its component/directive instance in `instances`.
    protected INSTANCE_ID_ATTR_NAME = 'core-instance-id';
    protected template = document.createElement('template'); // A template element to convert HTML to element.
    protected matchesFn: string; // Name of the "matches" function to use when simulating a closest call.
    protected instances: {[id: string]: any} = {}; // Store component/directive instances by id.
    protected lastInstanceId = 0; // Counter used to generate unique instance IDs.
    protected debugDisplay = false; // Whether to display debug messages. Store it in a variable to make it synchronous.
    protected displayedAlerts = {}; // To prevent duplicated alerts. Keyed by MD5 of title + message.
    protected logger; // Logger instance, initialised in the constructor.
    // Dependency-injected constructor: stores the providers and loads the persisted
    // "display debug messages" setting so later checks can be synchronous.
    constructor(protected translate: TranslateService,
            protected loadingCtrl: LoadingController,
            protected toastCtrl: ToastController,
            protected alertCtrl: AlertController,
            protected textUtils: CoreTextUtilsProvider,
            protected appProvider: CoreAppProvider,
            protected platform: Platform,
            protected configProvider: CoreConfigProvider,
            protected urlUtils: CoreUrlUtilsProvider,
            protected modalCtrl: ModalController,
            protected sanitizer: DomSanitizer,
            protected popoverCtrl: PopoverController,
            protected fileProvider: CoreFileProvider,
            loggerProvider: CoreLoggerProvider,
            protected eventsProvider: CoreEventsProvider) {
        this.logger = loggerProvider.getInstance('CoreDomUtilsProvider');
        // Check if debug messages should be displayed.
        configProvider.get(CoreConstants.SETTINGS_DEBUG_DISPLAY, false).then((debugDisplay) => {
            this.debugDisplay = !!debugDisplay;
        });
    }
/**
* Equivalent to element.closest(). If the browser doesn't support element.closest, it will
* traverse the parents to achieve the same functionality.
* Returns the closest ancestor of the current element (or the current element itself) which matches the selector.
*
* @param element DOM Element.
* @param selector Selector to search.
* @return Closest ancestor.
*/
closest(element: HTMLElement, selector: string): Element {
// Try to use closest if the browser supports it.
if (typeof element.closest == 'function') {
return element.closest(selector);
}
if (!this.matchesFn) {
// Find the matches function supported by the browser.
['matches', 'webkitMatchesSelector', 'mozMatchesSelector', 'msMatchesSelector', 'oMatchesSelector'].some((fn) => {
if (typeof document.body[fn] == 'function') {
this.matchesFn = fn;
return true;
}
return false;
});
if (!this.matchesFn) {
return;
}
}
// Traverse parents.
while (element) {
if (element[this.matchesFn](selector)) {
return element;
}
element = element.parentElement;
}
}
/**
* If the download size is higher than a certain threshold shows a confirm dialog.
*
* @param size Object containing size to download and a boolean to indicate if its totally or partialy calculated.
* @param message Code of the message to show. Default: 'core.course.confirmdownload'.
* @param unknownMessage ID of the message to show if size is unknown.
* @param wifiThreshold Threshold to show confirm in WiFi connection. Default: CoreWifiDownloadThreshold.
* @param limitedThreshold Threshold to show confirm in limited connection. Default: CoreDownloadThreshold.
* @param alwaysConfirm True to show a confirm even if the size isn't high, false otherwise.
* @return Promise resolved when the user confirms or if no confirm needed.
*/
confirmDownloadSize(size: any, message?: string, unknownMessage?: string, wifiThreshold?: number, limitedThreshold?: number,
alwaysConfirm?: boolean): Promise<void> {
const readableSize = this.textUtils.bytesToSize(size.size, 2);
const getAvailableBytes = new Promise((resolve): void => {
if (this.appProvider.isDesktop()) {
// Free space calculation is not supported on desktop.
resolve(null);
} else {
this.fileProvider.calculateFreeSpace().then((availableBytes) => {
if (this.platform.is('android')) {
return availableBytes;
} else {
// Space calculation is not accurate on iOS, but it gets more accurate when space is lower.
// We'll only use it when space is <500MB, or we're downloading more than twice the reported space.
if (availableBytes < CoreConstants.IOS_FREE_SPACE_THRESHOLD || size.size > availableBytes / 2) {
return availableBytes;
} else {
return null;
}
}
}).then((availableBytes) => {
resolve(availableBytes);
});
}
});
const getAvailableSpace = getAvailableBytes.then((availableBytes: number) => {
if (availableBytes === null) {
return '';
} else {
const availableSize = this.textUtils.bytesToSize(availableBytes, 2);
if (this.platform.is('android') && size.size > availableBytes - CoreConstants.MINIMUM_FREE_SPACE) {
return Promise.reject(this.translate.instant('core.course.insufficientavailablespace', { size: readableSize }));<|fim▁hole|> });
return getAvailableSpace.then((availableSpace) => {
wifiThreshold = typeof wifiThreshold == 'undefined' ? CoreConstants.WIFI_DOWNLOAD_THRESHOLD : wifiThreshold;
limitedThreshold = typeof limitedThreshold == 'undefined' ? CoreConstants.DOWNLOAD_THRESHOLD : limitedThreshold;
let wifiPrefix = '';
if (this.appProvider.isNetworkAccessLimited()) {
wifiPrefix = this.translate.instant('core.course.confirmlimiteddownload');
}
if (size.size < 0 || (size.size == 0 && !size.total)) {
// Seems size was unable to be calculated. Show a warning.
unknownMessage = unknownMessage || 'core.course.confirmdownloadunknownsize';
return this.showConfirm(wifiPrefix + this.translate.instant(unknownMessage, {availableSpace: availableSpace}));
} else if (!size.total) {
// Filesize is only partial.
return this.showConfirm(wifiPrefix + this.translate.instant('core.course.confirmpartialdownloadsize',
{ size: readableSize, availableSpace: availableSpace }));
} else if (alwaysConfirm || size.size >= wifiThreshold ||
(this.appProvider.isNetworkAccessLimited() && size.size >= limitedThreshold)) {
message = message || (size.size === 0 ? 'core.course.confirmdownloadzerosize' : 'core.course.confirmdownload');
return this.showConfirm(wifiPrefix + this.translate.instant(message,
{ size: readableSize, availableSpace: availableSpace }));
}
return Promise.resolve();
});
}
/**
* Convert some HTML as text into an HTMLElement. This HTML is put inside a div or a body.
*
* @param html Text to convert.
* @return Element.
*/
convertToElement(html: string): HTMLElement {
// Add a div to hold the content, that's the element that will be returned.
this.template.innerHTML = '<div>' + html + '</div>';
return <HTMLElement> this.template.content.children[0];
}
/**
* Create a "cancelled" error. These errors won't display an error message in showErrorModal functions.
*
* @return The error object.
*/
createCanceledError(): any {
return {coreCanceled: true};
}
/**
* Given a list of changes for a component input detected by a KeyValueDiffers, create an object similar to the one
* passed to the ngOnChanges functions.
*
* @param changes Changes detected by KeyValueDiffer.
* @return Changes in a format like ngOnChanges.
*/
createChangesFromKeyValueDiff(changes: any): { [name: string]: SimpleChange } {
const newChanges: { [name: string]: SimpleChange } = {};
// Added items are considered first change.
changes.forEachAddedItem((item) => {
newChanges[item.key] = new SimpleChange(item.previousValue, item.currentValue, true);
});
// Changed or removed items aren't first change.
changes.forEachChangedItem((item) => {
newChanges[item.key] = new SimpleChange(item.previousValue, item.currentValue, false);
});
changes.forEachRemovedItem((item) => {
newChanges[item.key] = new SimpleChange(item.previousValue, item.currentValue, true);
});
return newChanges;
}
/**
* Extract the downloadable URLs from an HTML code.
*
* @param html HTML code.
* @return List of file urls.
* @deprecated since 3.8. Use CoreFilepoolProvider.extractDownloadableFilesFromHtml instead.
*/
extractDownloadableFilesFromHtml(html: string): string[] {
this.logger.error('The function extractDownloadableFilesFromHtml has been moved to CoreFilepoolProvider.' +
' Please use that function instead of this one.');
const urls = [];
let elements;
const element = this.convertToElement(html);
elements = element.querySelectorAll('a, img, audio, video, source, track');
for (let i = 0; i < elements.length; i++) {
const element = elements[i];
let url = element.tagName === 'A' ? element.href : element.src;
if (url && this.urlUtils.isDownloadableUrl(url) && urls.indexOf(url) == -1) {
urls.push(url);
}
// Treat video poster.
if (element.tagName == 'VIDEO' && element.getAttribute('poster')) {
url = element.getAttribute('poster');
if (url && this.urlUtils.isDownloadableUrl(url) && urls.indexOf(url) == -1) {
urls.push(url);
}
}
}
return urls;
}
/**
* Extract the downloadable URLs from an HTML code and returns them in fake file objects.
*
* @param html HTML code.
* @return List of fake file objects with file URLs.
* @deprecated since 3.8. Use CoreFilepoolProvider.extractDownloadableFilesFromHtmlAsFakeFileObjects instead.
*/
extractDownloadableFilesFromHtmlAsFakeFileObjects(html: string): any[] {
const urls = this.extractDownloadableFilesFromHtml(html);
// Convert them to fake file objects.
return urls.map((url) => {
return {
fileurl: url
};
});
}
/**
* Search all the URLs in a CSS file content.
*
* @param code CSS code.
* @return List of URLs.
*/
extractUrlsFromCSS(code: string): string[] {
// First of all, search all the url(...) occurrences that don't include "data:".
const urls = [],
matches = code.match(/url\(\s*["']?(?!data:)([^)]+)\)/igm);
if (!matches) {
return urls;
}
// Extract the URL form each match.
matches.forEach((match) => {
const submatches = match.match(/url\(\s*['"]?([^'"]*)['"]?\s*\)/im);
if (submatches && submatches[1]) {
urls.push(submatches[1]);
}
});
return urls;
}
    /**
     * Fix syntax errors in HTML.
     *
     * @param html HTML text.
     * @return Fixed HTML text.
     */
    fixHtml(html: string): string {
        // Let the browser's template parser normalise malformed markup.
        this.template.innerHTML = html;
        // Valid attribute names: no control chars, DEL/C1 range, quotes, '>', '/', '='.
        const attrNameRegExp = /[^\x00-\x20\x7F-\x9F"'>\/=]+/;
        const fixElement = (element: Element): void => {
            // Remove attributes with an invalid name.
            Array.from(element.attributes).forEach((attr) => {
                if (!attrNameRegExp.test(attr.name)) {
                    element.removeAttributeNode(attr);
                }
            });
            // Recurse into child elements so the whole tree is sanitised.
            Array.from(element.children).forEach(fixElement);
        };
        Array.from(this.template.content.children).forEach(fixElement);
        // Serialise the cleaned tree back to an HTML string.
        return this.template.innerHTML;
    }
/**
* Focus an element and open keyboard.
*
* @param el HTML element to focus.
*/
focusElement(el: HTMLElement): void {
if (el && el.focus) {
el.focus();
if (this.platform.is('android') && this.supportsInputKeyboard(el)) {
// On some Android versions the keyboard doesn't open automatically.
this.appProvider.openKeyboard();
}
}
}
/**
* Formats a size to be used as width/height of an element.
* If the size is already valid (like '500px' or '50%') it won't be modified.
* Returned size will have a format like '500px'.
*
* @param size Size to format.
* @return Formatted size. If size is not valid, returns an empty string.
*/
formatPixelsSize(size: any): string {
if (typeof size == 'string' && (size.indexOf('px') > -1 || size.indexOf('%') > -1 || size == 'auto' || size == 'initial')) {
// It seems to be a valid size.
return size;
}
size = parseInt(size, 10);
if (!isNaN(size)) {
return size + 'px';
}
return '';
}
/**
* Returns the contents of a certain selection in a DOM element.
*
* @param element DOM element to search in.
* @param selector Selector to search.
* @return Selection contents. Undefined if not found.
*/
getContentsOfElement(element: HTMLElement, selector: string): string {
if (element) {
const selected = element.querySelector(selector);
if (selected) {
return selected.innerHTML;
}
}
}
/**
* Get the data from a form. It will only collect elements that have a name.
*
* @param form The form to get the data from.
* @return Object with the data. The keys are the names of the inputs.
*/
getDataFromForm(form: HTMLFormElement): any {
if (!form || !form.elements) {
return {};
}
const data = {};
for (let i = 0; i < form.elements.length; i++) {
const element: any = form.elements[i],
name = element.name || '';
// Ignore submit inputs.
if (!name || element.type == 'submit' || element.tagName == 'BUTTON') {
continue;
}
// Get the value.
if (element.type == 'checkbox') {
data[name] = !!element.checked;
} else if (element.type == 'radio') {
if (element.checked) {
data[name] = element.value;
}
} else {
data[name] = element.value;
}
}
return data;
}
/**
* Returns the attribute value of a string element. Only the first element will be selected.
*
* @param html HTML element in string.
* @param attribute Attribute to get.
* @return Attribute value.
*/
getHTMLElementAttribute(html: string, attribute: string): string {
return this.convertToElement(html).children[0].getAttribute('src');
}
/**
* Returns height of an element.
*
* @param element DOM element to measure.
* @param usePadding Whether to use padding to calculate the measure.
* @param useMargin Whether to use margin to calculate the measure.
* @param useBorder Whether to use borders to calculate the measure.
* @param innerMeasure If inner measure is needed: padding, margin or borders will be substracted.
* @return Height in pixels.
*/
getElementHeight(element: any, usePadding?: boolean, useMargin?: boolean, useBorder?: boolean,
innerMeasure?: boolean): number {
return this.getElementMeasure(element, false, usePadding, useMargin, useBorder, innerMeasure);
}
    /**
     * Returns height or width of an element.
     *
     * @param element DOM element to measure.
     * @param getWidth Whether to get width or height.
     * @param usePadding Whether to use padding to calculate the measure.
     * @param useMargin Whether to use margin to calculate the measure.
     * @param useBorder Whether to use borders to calculate the measure.
     * @param innerMeasure If inner measure is needed: padding, margin or borders will be substracted.
     * @return Measure in pixels.
     */
    getElementMeasure(element: any, getWidth?: boolean, usePadding?: boolean, useMargin?: boolean, useBorder?: boolean,
            innerMeasure?: boolean): number {
        // Pick the property names for the requested axis up front.
        const offsetMeasure = getWidth ? 'offsetWidth' : 'offsetHeight',
            measureName = getWidth ? 'width' : 'height',
            clientMeasure = getWidth ? 'clientWidth' : 'clientHeight',
            priorSide = getWidth ? 'Left' : 'Top',
            afterSide = getWidth ? 'Right' : 'Bottom';
        let measure = element[offsetMeasure] || element[measureName] || element[clientMeasure] || 0;
        // Measure not correctly taken.
        if (measure <= 0) {
            const style = getComputedStyle(element);
            if (style && style.display == '') {
                // Temporarily force a layout-producing display so the element gets a size, then restore it.
                element.style.display = 'inline-block';
                measure = element[offsetMeasure] || element[measureName] || element[clientMeasure] || 0;
                element.style.display = '';
            }
        }
        if (usePadding || useMargin || useBorder) {
            const computedStyle = getComputedStyle(element);
            let surround = 0;
            if (usePadding) {
                surround += this.getComputedStyleMeasure(computedStyle, 'padding' + priorSide) +
                    this.getComputedStyleMeasure(computedStyle, 'padding' + afterSide);
            }
            if (useMargin) {
                surround += this.getComputedStyleMeasure(computedStyle, 'margin' + priorSide) +
                    this.getComputedStyleMeasure(computedStyle, 'margin' + afterSide);
            }
            if (useBorder) {
                surround += this.getComputedStyleMeasure(computedStyle, 'border' + priorSide + 'Width') +
                    this.getComputedStyleMeasure(computedStyle, 'border' + afterSide + 'Width');
            }
            // Inner measure subtracts the surround (clamped at 0); outer measure adds it.
            if (innerMeasure) {
                measure = measure > surround ? measure - surround : 0;
            } else {
                measure += surround;
            }
        }
        return measure;
    }
/**
* Returns the computed style measure or 0 if not found or NaN.
*
* @param style Style from getComputedStyle.
* @param measure Measure to get.
* @return Result of the measure.
*/
getComputedStyleMeasure(style: any, measure: string): number {
return parseInt(style[measure], 10) || 0;
}
/**
* Get the HTML code to render a connection warning icon.
*
* @return HTML Code.
*/
getConnectionWarningIconHtml(): string {
return '<div text-center><span class="core-icon-with-badge">' +
'<ion-icon role="img" class="icon fa fa-wifi" aria-label="wifi"></ion-icon>' +
'<ion-icon class="icon fa fa-exclamation-triangle core-icon-badge"></ion-icon>' +
'</span></div>';
}
/**
* Returns width of an element.
*
* @param element DOM element to measure.
* @param usePadding Whether to use padding to calculate the measure.
* @param useMargin Whether to use margin to calculate the measure.
* @param useBorder Whether to use borders to calculate the measure.
* @param innerMeasure If inner measure is needed: padding, margin or borders will be substracted.
* @return Width in pixels.
*/
getElementWidth(element: any, usePadding?: boolean, useMargin?: boolean, useBorder?: boolean,
innerMeasure?: boolean): number {
return this.getElementMeasure(element, true, usePadding, useMargin, useBorder, innerMeasure);
}
    /**
     * Retrieve the position of a element relative to another element.
     *
     * @param container Element to search in.
     * @param selector Selector to find the element to gets the position.
     * @param positionParentClass Parent Class where to stop calculating the position. Default scroll-content.
     * @return positionLeft, positionTop of the element relative to. Null if the element isn't found.
     */
    getElementXY(container: HTMLElement, selector?: string, positionParentClass?: string): number[] {
        let element: HTMLElement = <HTMLElement> (selector ? container.querySelector(selector) : container),
            offsetElement,
            positionTop = 0,
            positionLeft = 0;
        if (!positionParentClass) {
            positionParentClass = 'scroll-content';
        }
        if (!element) {
            return null;
        }
        // Accumulate offsets walking up the tree until the position parent (or the root) is reached.
        while (element) {
            positionLeft += (element.offsetLeft - element.scrollLeft + element.clientLeft);
            positionTop += (element.offsetTop - element.scrollTop + element.clientTop);
            offsetElement = element.offsetParent;
            element = element.parentElement;
            // Every parent class has to be checked but the position has to be got form offsetParent.
            while (offsetElement != element && element) {
                // If positionParentClass element is reached, stop adding tops.
                if (element.className.indexOf(positionParentClass) != -1) {
                    element = null;
                } else {
                    element = element.parentElement;
                }
            }
            // Finally, check again.
            if (element && element.className.indexOf(positionParentClass) != -1) {
                element = null;
            }
        }
        return [positionLeft, positionTop];
    }
/**
* Given an error message, return a suitable error title.
*
* @param message The error message.
* @return Title.
*/
private getErrorTitle(message: string): any {
if (message == this.translate.instant('core.networkerrormsg') ||
message == this.translate.instant('core.fileuploader.errormustbeonlinetoupload')) {
return this.sanitizer.bypassSecurityTrustHtml(this.getConnectionWarningIconHtml());
}
return this.textUtils.decodeHTML(this.translate.instant('core.error'));
}
/**
* Get the error message from an error, including debug data if needed.
*
* @param error Message to show.
* @param needsTranslate Whether the error needs to be translated.
* @return Error message, null if no error should be displayed.
*/
getErrorMessage(error: any, needsTranslate?: boolean): string {
let extraInfo = '';
if (typeof error == 'object') {
if (this.debugDisplay) {
// Get the debug info. Escape the HTML so it is displayed as it is in the view.
if (error.debuginfo) {
extraInfo = '<br><br>' + this.textUtils.escapeHTML(error.debuginfo);
}
if (error.backtrace) {
extraInfo += '<br><br>' + this.textUtils.replaceNewLines(this.textUtils.escapeHTML(error.backtrace), '<br>');
}
// tslint:disable-next-line
console.error(error);
}
// We received an object instead of a string. Search for common properties.
if (error.coreCanceled) {
// It's a canceled error, don't display an error.
return null;
}
error = this.textUtils.getErrorMessageFromError(error);
if (!error) {
// No common properties found, just stringify it.
error = JSON.stringify(error);
extraInfo = ''; // No need to add extra info because it's already in the error.
}
// Try to remove tokens from the contents.
const matches = error.match(/token"?[=|:]"?(\w*)/, '');
if (matches && matches[1]) {
error = error.replace(new RegExp(matches[1], 'g'), 'secret');
}
}
if (error == CoreConstants.DONT_SHOW_ERROR) {
// The error shouldn't be shown, stop.
return null;
}
let message = this.textUtils.decodeHTML(needsTranslate ? this.translate.instant(error) : error);
if (extraInfo) {
message += extraInfo;
}
return message;
}
/**
* Retrieve component/directive instance.
* Please use this function only if you cannot retrieve the instance using parent/child methods: ViewChild (or similar)
* or Angular's injection.
*
* @param element The root element of the component/directive.
* @return The instance, undefined if not found.
*/
getInstanceByElement(element: Element): any {
const id = element.getAttribute(this.INSTANCE_ID_ATTR_NAME);
return this.instances[id];
}
/**
* Wait an element to exists using the findFunction.
*
* @param findFunction The function used to find the element.
* @return Resolved if found, rejected if too many tries.
*/
waitElementToExist(findFunction: Function): Promise<HTMLElement> {
const promiseInterval = {
promise: null,
resolve: null,
reject: null
};
let tries = 100;
promiseInterval.promise = new Promise((resolve, reject): void => {
promiseInterval.resolve = resolve;
promiseInterval.reject = reject;
});
const clear = setInterval(() => {
const element: HTMLElement = findFunction();
if (element) {
clearInterval(clear);
promiseInterval.resolve(element);
} else {
tries--;
if (tries <= 0) {
clearInterval(clear);
promiseInterval.reject();
}
}
}, 100);
return promiseInterval.promise;
}
    /**
     * Handle bootstrap tooltips in a certain element.
     * Replaces Bootstrap's tooltip behaviour with an Ionic popover shown on click.
     *
     * @param element Element to check.
     */
    handleBootstrapTooltips(element: HTMLElement): void {
        const els = Array.from(element.querySelectorAll('[data-toggle="tooltip"]'));
        els.forEach((el) => {
            const content = el.getAttribute('title') || el.getAttribute('data-original-title'),
                trigger = el.getAttribute('data-trigger') || 'hover focus',
                treated = el.getAttribute('data-bstooltip-treated');
            // Skip empty tooltips, already-treated elements, and triggers we don't emulate.
            if (!content || treated === 'true' ||
                    (trigger.indexOf('hover') == -1 && trigger.indexOf('focus') == -1 && trigger.indexOf('click') == -1)) {
                return;
            }
            el.setAttribute('data-bstooltip-treated', 'true'); // Mark it as treated.
            // Store the title in data-original-title instead of title, like BS does.
            // Clearing title also prevents the browser's native tooltip from showing.
            el.setAttribute('data-original-title', content);
            el.setAttribute('title', '');
            el.addEventListener('click', (e) => {
                const html = el.getAttribute('data-html');
                const popover = this.popoverCtrl.create(CoreBSTooltipComponent, {
                    content: content,
                    html: html === 'true'
                });
                popover.present({
                    ev: e
                });
            });
        });
    }
/**
* Check if an element is outside of screen (viewport).
*
* @param scrollEl The element that must be scrolled.
* @param element DOM element to check.
* @return Whether the element is outside of the viewport.
*/
isElementOutsideOfScreen(scrollEl: HTMLElement, element: HTMLElement): boolean {
const elementRect = element.getBoundingClientRect();
let elementMidPoint,
scrollElRect,
scrollTopPos = 0;
if (!elementRect) {
return false;
}
elementMidPoint = Math.round((elementRect.bottom + elementRect.top) / 2);
scrollElRect = scrollEl.getBoundingClientRect();
scrollTopPos = (scrollElRect && scrollElRect.top) || 0;
return elementMidPoint > window.innerHeight || elementMidPoint < scrollTopPos;
}
/**
* Check if rich text editor is enabled.
*
* @return Promise resolved with boolean: true if enabled, false otherwise.
*/
isRichTextEditorEnabled(): Promise<boolean> {
if (this.isRichTextEditorSupported()) {
return this.configProvider.get(CoreConstants.SETTINGS_RICH_TEXT_EDITOR, true).then((enabled) => {
return !!enabled;
});
}
return Promise.resolve(false);
}
/**
* Check if rich text editor is supported in the platform.
*
* @return Whether it's supported.
*/
isRichTextEditorSupported(): boolean {
return true;
}
/**
* Move children from one HTMLElement to another.
*
* @param oldParent The old parent.
* @param newParent The new parent.
* @param prepend If true, adds the children to the beginning of the new parent.
* @return List of moved children.
*/
moveChildren(oldParent: HTMLElement, newParent: HTMLElement, prepend?: boolean): Node[] {
const movedChildren: Node[] = [];
const referenceNode = prepend ? newParent.firstChild : null;
while (oldParent.childNodes.length > 0) {
const child = oldParent.childNodes[0];
movedChildren.push(child);
newParent.insertBefore(child, referenceNode);
}
return movedChildren;
}
/**
* Search and remove a certain element from inside another element.
*
* @param element DOM element to search in.
* @param selector Selector to search.
*/
removeElement(element: HTMLElement, selector: string): void {
if (element) {
const selected = element.querySelector(selector);
if (selected) {
selected.remove();
}
}
}
/**
* Search and remove a certain element from an HTML code.
*
* @param html HTML code to change.
* @param selector Selector to search.
* @param removeAll True if it should remove all matches found, false if it should only remove the first one.
* @return HTML without the element.
*/
removeElementFromHtml(html: string, selector: string, removeAll?: boolean): string {
let selected;
const element = this.convertToElement(html);
if (removeAll) {
selected = element.querySelectorAll(selector);
for (let i = 0; i < selected.length; i++) {
selected[i].remove();
}
} else {
selected = element.querySelector(selector);
if (selected) {
selected.remove();
}
}
return element.innerHTML;
}
/**
* Remove a component/directive instance using the DOM Element.
*
* @param element The root element of the component/directive.
*/
removeInstanceByElement(element: Element): void {
const id = element.getAttribute(this.INSTANCE_ID_ATTR_NAME);
delete this.instances[id];
}
/**
* Remove a component/directive instance using the ID.
*
* @param id The ID to remove.
*/
removeInstanceById(id: string): void {
delete this.instances[id];
}
/**
* Search for certain classes in an element contents and replace them with the specified new values.
*
* @param element DOM element.
* @param map Mapping of the classes to replace. Keys must be the value to replace, values must be
* the new class name. Example: {'correct': 'core-question-answer-correct'}.
*/
replaceClassesInElement(element: HTMLElement, map: any): void {
for (const key in map) {
const foundElements = element.querySelectorAll('.' + key);
for (let i = 0; i < foundElements.length; i++) {
const foundElement = foundElements[i];
foundElement.className = foundElement.className.replace(key, map[key]);
}
}
}
    /**
     * Given an HTML, search all links and media and tries to restore original sources using the paths object.
     *
     * @param html HTML code.
     * @param paths Object linking URLs in the html code with the real URLs to use.
     * @param anchorFn Function to call with each anchor. Optional.
     * @return Treated HTML code.
     */
    restoreSourcesInHtml(html: string, paths: object, anchorFn?: Function): string {
        let media,
            anchors;
        const element = this.convertToElement(html);
        // Treat elements with src (img, audio, video, ...).
        media = Array.from(element.querySelectorAll('img, video, audio, source, track'));
        media.forEach((media: HTMLElement) => {
            // Attributes are URL-encoded; paths keys are assumed decoded — TODO confirm with callers.
            let newSrc = paths[this.textUtils.decodeURIComponent(media.getAttribute('src'))];
            if (typeof newSrc != 'undefined') {
                media.setAttribute('src', newSrc);
            }
            // Treat video posters.
            if (media.tagName == 'VIDEO' && media.getAttribute('poster')) {
                newSrc = paths[this.textUtils.decodeURIComponent(media.getAttribute('poster'))];
                if (typeof newSrc !== 'undefined') {
                    media.setAttribute('poster', newSrc);
                }
            }
        });
        // Now treat links.
        anchors = Array.from(element.querySelectorAll('a'));
        anchors.forEach((anchor: HTMLElement) => {
            const href = this.textUtils.decodeURIComponent(anchor.getAttribute('href')),
                newUrl = paths[href];
            if (typeof newUrl != 'undefined') {
                anchor.setAttribute('href', newUrl);
                // Let the caller post-process each rewritten anchor (e.g. attach click handlers).
                if (typeof anchorFn == 'function') {
                    anchorFn(anchor, href);
                }
            }
        });
        return element.innerHTML;
    }
    /**
     * Scroll to somehere in the content.
     * Checks hidden property _scroll to avoid errors if view is not active.
     *
     * @param content Content where to execute the function.
     * @param x The x-value to scroll to.
     * @param y The y-value to scroll to.
     * @param duration Duration of the scroll animation in milliseconds. Defaults to `300`.
     * @param done Callback invoked when the scroll finishes.
     * @return Returns a promise which is resolved when the scroll has completed.
     *         Returns a falsy value (not a promise) if the content or its scroll isn't ready.
     */
    scrollTo(content: Content, x: number, y: number, duration?: number, done?: Function): Promise<any> {
        return content && content._scroll && content.scrollTo(x, y, duration, done);
    }
    /**
     * Scroll to Bottom of the content.
     * Checks hidden property _scroll to avoid errors if view is not active.
     *
     * @param content Content where to execute the function.
     * @param duration Duration of the scroll animation in milliseconds. Defaults to `300`.
     * @return Returns a promise which is resolved when the scroll has completed.
     *         Returns a falsy value (not a promise) if the content or its scroll isn't ready.
     */
    scrollToBottom(content: Content, duration?: number): Promise<any> {
        return content && content._scroll && content.scrollToBottom(duration);
    }
    /**
     * Scroll to Top of the content.
     * Checks hidden property _scroll to avoid errors if view is not active.
     *
     * @param content Content where to execute the function.
     * @param duration Duration of the scroll animation in milliseconds. Defaults to `300`.
     * @return Returns a promise which is resolved when the scroll has completed.
     *         Returns a falsy value (not a promise) if the content or its scroll isn't ready.
     */
    scrollToTop(content: Content, duration?: number): Promise<any> {
        return content && content._scroll && content.scrollToTop(duration);
    }
/**
* Returns contentHeight of the content.
* Checks hidden property _scroll to avoid errors if view is not active.
*
* @param content Content where to execute the function.
* @return Content contentHeight or 0.
*/
getContentHeight(content: Content): number {
return (content && content._scroll && content.contentHeight) || 0;
}
/**
* Returns scrollHeight of the content.
* Checks hidden property _scroll to avoid errors if view is not active.
*
* @param content Content where to execute the function.
* @return Content scrollHeight or 0.
*/
getScrollHeight(content: Content): number {
return (content && content._scroll && content.scrollHeight) || 0;
}
/**
* Returns scrollTop of the content.
* Checks hidden property _scrollContent to avoid errors if view is not active.
* Using navite value of scroll to avoid having non updated values.
*
* @param content Content where to execute the function.
* @return Content scrollTop or 0.
*/
getScrollTop(content: Content): number {
return (content && content._scrollContent && content._scrollContent.nativeElement.scrollTop) || 0;
}
/**
* Scroll to a certain element.
*
* @param content The content that must be scrolled.
* @param element The element to scroll to.
* @param scrollParentClass Parent class where to stop calculating the position. Default scroll-content.
* @return True if the element is found, false otherwise.
*/
scrollToElement(content: Content, element: HTMLElement, scrollParentClass?: string): boolean {
const position = this.getElementXY(element, undefined, scrollParentClass);
if (!position) {
return false;
}
this.scrollTo(content, position[0], position[1]);
return true;
}
/**
* Scroll to a certain element using a selector to find it.
*
* @param content The content that must be scrolled.
* @param selector Selector to find the element to scroll to.
* @param scrollParentClass Parent class where to stop calculating the position. Default scroll-content.
* @return True if the element is found, false otherwise.
*/
scrollToElementBySelector(content: Content, selector: string, scrollParentClass?: string): boolean {
const position = this.getElementXY(content.getScrollElement(), selector, scrollParentClass);
if (!position) {
return false;
}
this.scrollTo(content, position[0], position[1]);
return true;
}
/**
* Search for an input with error (core-input-error directive) and scrolls to it if found.
*
* @param content The content that must be scrolled.
* @param [scrollParentClass] Parent class where to stop calculating the position. Default scroll-content.
* @return True if the element is found, false otherwise.
*/
scrollToInputError(content: Content, scrollParentClass?: string): boolean {
if (!content) {
return false;
}
return this.scrollToElementBySelector(content, '.core-input-error', scrollParentClass);
}
    /**
     * Set whether debug messages should be displayed.
     *
     * @param value Whether to display or not.
     */
    setDebugDisplay(value: boolean): void {
        // NOTE(review): only stores the flag; presumably read by the error-display
        // helpers of this provider — confirm against the rest of the class.
        this.debugDisplay = value;
    }
    /**
     * Show an alert modal with a button to close it.
     *
     * If an alert with the exact same title and message is already displayed,
     * that existing instance is returned instead of stacking a duplicate.
     *
     * @param title Title to show.
     * @param message Message to show.
     * @param buttonText Text of the button. Defaults to the translated 'core.ok'.
     * @param autocloseTime Number of milliseconds to wait to close the modal. If not defined, modal won't be closed.
     * @return Promise resolved with the alert modal.
     */
    showAlert(title: string, message: string, buttonText?: string, autocloseTime?: number): Promise<CoreAlert> {
        const hasHTMLTags = this.textUtils.hasHTMLTags(message);
        let promise;

        if (hasHTMLTags) {
            // Format the text.
            promise = this.textUtils.formatText(message);
        } else {
            promise = Promise.resolve(message);
        }

        return promise.then((message) => {
            // Identify the alert by a hash of title + message so duplicates can be detected.
            const alertId = <string> Md5.hashAsciiStr((title || '') + '#' + (message || ''));

            if (this.displayedAlerts[alertId]) {
                // There's already an alert with the same message and title. Return it.
                return this.displayedAlerts[alertId];
            }

            const alert: CoreAlert = <any> this.alertCtrl.create({
                title: title,
                message: message,
                buttons: [buttonText || this.translate.instant('core.ok')]
            });

            alert.present().then(() => {
                if (hasHTMLTags) {
                    // Treat all anchors so they don't override the app.
                    const alertMessageEl: HTMLElement = alert.pageRef().nativeElement.querySelector('.alert-message');
                    this.treatAnchors(alertMessageEl);
                }
            });

            // Store the alert and remove it when dismissed.
            this.displayedAlerts[alertId] = alert;

            // Define the observables to extend the Alert class. This will allow several callbacks instead of just one.
            alert.didDismiss = new Subject();
            alert.willDismiss = new Subject();

            // Set the callbacks to trigger an observable event.
            alert.onDidDismiss((data: any, role: string) => {
                // Forget the alert so an identical one can be shown again later.
                delete this.displayedAlerts[alertId];
                alert.didDismiss.next({data: data, role: role});
            });

            alert.onWillDismiss((data: any, role: string) => {
                alert.willDismiss.next({data: data, role: role});
            });

            if (autocloseTime > 0) {
                setTimeout(() => {
                    alert.dismiss();
                }, autocloseTime);
            }

            return alert;
        });
    }
/**
* Show an alert modal with a button to close it, translating the values supplied.
*
* @param title Title to show.
* @param message Message to show.
* @param buttonText Text of the button.
* @param autocloseTime Number of milliseconds to wait to close the modal. If not defined, modal won't be closed.
* @return Promise resolved with the alert modal.
*/
showAlertTranslated(title: string, message: string, buttonText?: string, autocloseTime?: number): Promise<Alert> {
title = title ? this.translate.instant(title) : title;
message = message ? this.translate.instant(message) : message;
buttonText = buttonText ? this.translate.instant(buttonText) : buttonText;
return this.showAlert(title, message, buttonText, autocloseTime);
}
/**
* Shortcut for a delete confirmation modal.
*
* @param translateMessage String key to show in the modal body translated. Default: 'core.areyousure'.
* @param translateArgs Arguments to pass to translate if necessary.
* @param options More options. See https://ionicframework.com/docs/v3/api/components/alert/AlertController/
* @return Promise resolved if the user confirms and rejected with a canceled error if he cancels.
*/
showDeleteConfirm(translateMessage: string = 'core.areyousure', translateArgs: any = {}, options?: any): Promise<any> {
return this.showConfirm(this.translate.instant(translateMessage, translateArgs), undefined,
this.translate.instant('core.delete'), undefined, options);
}
    /**
     * Show a confirm modal.
     *
     * @param message Message to show in the modal body.
     * @param title Title of the modal. When omitted, the modal gets the 'core-nohead' CSS class.
     * @param okText Text of the OK button. Defaults to translated 'core.ok'.
     * @param cancelText Text of the Cancel button. Defaults to translated 'core.cancel'.
     * @param options More options. See https://ionicframework.com/docs/v3/api/components/alert/AlertController/
     * @return Promise resolved if the user confirms and rejected with a canceled error if he cancels.
     */
    showConfirm(message: string, title?: string, okText?: string, cancelText?: string, options?: any): Promise<any> {
        return new Promise<void>((resolve, reject): void => {
            const hasHTMLTags = this.textUtils.hasHTMLTags(message);
            let promise;

            if (hasHTMLTags) {
                // Format the text.
                promise = this.textUtils.formatText(message);
            } else {
                promise = Promise.resolve(message);
            }

            promise.then((message) => {
                options = options || {};
                options.message = message;
                options.title = title;
                if (!title) {
                    // No title: style the modal without a header.
                    options.cssClass = 'core-nohead';
                }
                options.buttons = [
                    {
                        text: cancelText || this.translate.instant('core.cancel'),
                        role: 'cancel',
                        handler: (): void => {
                            // Reject with a "canceled" flavoured error so callers can tell it apart from failures.
                            reject(this.createCanceledError());
                        }
                    },
                    {
                        text: okText || this.translate.instant('core.ok'),
                        handler: (data: any): void => {
                            resolve(data);
                        }
                    }
                ];

                const alert = this.alertCtrl.create(options);
                alert.present().then(() => {
                    if (hasHTMLTags) {
                        // Treat all anchors so they don't override the app.
                        const alertMessageEl: HTMLElement = alert.pageRef().nativeElement.querySelector('.alert-message');
                        this.treatAnchors(alertMessageEl);
                    }
                });
            });
        });
    }
/**
* Show an alert modal with an error message.
*
* @param error Message to show.
* @param needsTranslate Whether the error needs to be translated.
* @param autocloseTime Number of milliseconds to wait to close the modal. If not defined, modal won't be closed.
* @return Promise resolved with the alert modal.
*/
showErrorModal(error: any, needsTranslate?: boolean, autocloseTime?: number): Promise<Alert> {
const message = this.getErrorMessage(error, needsTranslate);
if (message === null) {
// Message doesn't need to be displayed, stop.
return Promise.resolve(null);
}
return this.showAlert(this.getErrorTitle(message), message, undefined, autocloseTime);
}
    /**
     * Show an alert modal with an error message. It uses a default message if error is not a string.
     *
     * @param error Message to show.
     * @param defaultError Message to show if the error is not a string.
     * @param needsTranslate Whether the error needs to be translated.
     * @param autocloseTime Number of milliseconds to wait to close the modal. If not defined, modal won't be closed.
     * @return Promise resolved with the alert modal, or undefined for canceled errors.
     */
    showErrorModalDefault(error: any, defaultError: any, needsTranslate?: boolean, autocloseTime?: number): Promise<Alert> {
        if (error && error.coreCanceled) {
            // It's a canceled error, don't display an error.
            return;
        }

        let errorMessage = error;

        if (error && typeof error != 'string') {
            // Try to extract a message from the error object; used only to decide below
            // whether the error carries displayable text.
            errorMessage = this.textUtils.getErrorMessageFromError(error);
        }

        // If a message could be extracted, pass the ORIGINAL error through (showErrorModal
        // handles the extraction itself); otherwise fall back to the default error.
        return this.showErrorModal(typeof errorMessage == 'string' ? error : defaultError, needsTranslate, autocloseTime);
    }
/**
* Show an alert modal with the first warning error message. It uses a default message if error is not a string.
*
* @param warnings Warnings returned.
* @param defaultError Message to show if the error is not a string.
* @param needsTranslate Whether the error needs to be translated.
* @param autocloseTime Number of milliseconds to wait to close the modal. If not defined, modal won't be closed.
* @return Promise resolved with the alert modal.
*/
showErrorModalFirstWarning(warnings: any, defaultError: any, needsTranslate?: boolean, autocloseTime?: number): Promise<Alert> {
const error = warnings && warnings.length && warnings[0].message;
return this.showErrorModalDefault(error, defaultError, needsTranslate, autocloseTime);
}
    /**
     * Displays a loading modal window.
     *
     * @param text The text of the modal window. Default: core.loading.
     * @param needsTranslate Whether the 'text' needs to be translated.
     * @return Loading modal instance.
     * @description
     * Usage:
     *     let modal = domUtils.showModalLoading(myText);
     *     ...
     *     modal.dismiss();
     */
    showModalLoading(text?: string, needsTranslate?: boolean): Loading {
        if (!text) {
            text = this.translate.instant('core.loading');
        } else if (needsTranslate) {
            text = this.translate.instant(text);
        }

        const loader = this.loadingCtrl.create({
                content: text
            }),
            dismiss = loader.dismiss.bind(loader);

        let isPresented = false,
            isDismissed = false;

        // Override dismiss to prevent dismissing a modal twice (it can throw an error and cause problems).
        loader.dismiss = (data, role, navOptions): Promise<any> => {
            if (!isPresented || isDismissed) {
                // Not presented yet (the delayed present below will then be skipped) or
                // already dismissed: nothing to do.
                isDismissed = true;

                return Promise.resolve();
            }

            isDismissed = true;

            // Delegate to the original (bound) dismiss.
            return dismiss(data, role, navOptions);
        };

        // Wait a bit before presenting the modal, to prevent it being displayed if dismiss is called fast.
        setTimeout(() => {
            if (!isDismissed) {
                isPresented = true;
                loader.present();
            }
        }, 40);

        return loader;
    }
    /**
     * Show a prompt modal to input some data.
     *
     * @param message Modal message.
     * @param title Modal title.
     * @param placeholder Placeholder of the input element. By default, "Password".
     * @param type Type of the input element. By default, password.
     * @return Promise resolved with the input data if the user clicks OK, rejected if cancels.
     */
    showPrompt(message: string, title?: string, placeholder?: string, type: string = 'password'): Promise<any> {
        return new Promise((resolve, reject): void => {
            const hasHTMLTags = this.textUtils.hasHTMLTags(message);
            let promise;

            if (hasHTMLTags) {
                // Format the text.
                promise = this.textUtils.formatText(message);
            } else {
                promise = Promise.resolve(message);
            }

            promise.then((message) => {
                const alert = this.alertCtrl.create({
                    message: message,
                    title: title,
                    inputs: [
                        {
                            name: 'promptinput',
                            placeholder: placeholder || this.translate.instant('core.login.password'),
                            type: type
                        }
                    ],
                    buttons: [
                        {
                            text: this.translate.instant('core.cancel'),
                            role: 'cancel',
                            handler: (): void => {
                                // User canceled: reject without a value.
                                reject();
                            }
                        },
                        {
                            text: this.translate.instant('core.ok'),
                            handler: (data): void => {
                                // Resolve with the value typed into the single input.
                                resolve(data.promptinput);
                            }
                        }
                    ]
                });

                alert.present().then(() => {
                    if (hasHTMLTags) {
                        // Treat all anchors so they don't override the app.
                        const alertMessageEl: HTMLElement = alert.pageRef().nativeElement.querySelector('.alert-message');
                        this.treatAnchors(alertMessageEl);
                    }
                });
            });
        });
    }
/**
* Displays an autodimissable toast modal window.
*
* @param text The text of the toast.
* @param needsTranslate Whether the 'text' needs to be translated.
* @param duration Duration in ms of the dimissable toast.
* @param cssClass Class to add to the toast.
* @param dismissOnPageChange Dismiss the Toast on page change.
* @return Toast instance.
*/
showToast(text: string, needsTranslate?: boolean, duration: number = 2000, cssClass: string = '',
dismissOnPageChange: boolean = true): Toast {
if (needsTranslate) {
text = this.translate.instant(text);
}
const loader = this.toastCtrl.create({
message: text,
duration: duration,
position: 'bottom',
cssClass: cssClass,
dismissOnPageChange: dismissOnPageChange
});
loader.present();
return loader;
}
/**
* Stores a component/directive instance.
*
* @param element The root element of the component/directive.
* @param instance The instance to store.
* @return ID to identify the instance.
*/
storeInstanceByElement(element: Element, instance: any): string {
const id = String(this.lastInstanceId++);
element.setAttribute(this.INSTANCE_ID_ATTR_NAME, id);
this.instances[id] = instance;
return id;
}
/**
* Check if an element supports input via keyboard.
*
* @param el HTML element to check.
* @return Whether it supports input using keyboard.
*/
supportsInputKeyboard(el: any): boolean {
return el && !el.disabled && (el.tagName.toLowerCase() == 'textarea' ||
(el.tagName.toLowerCase() == 'input' && this.INPUT_SUPPORT_KEYBOARD.indexOf(el.type) != -1));
}
/**
* Converts HTML formatted text to DOM element(s).
*
* @param text HTML text.
* @return Same text converted to HTMLCollection.
*/
toDom(text: string): HTMLCollection {
const element = this.convertToElement(text);
return element.children;
}
    /**
     * Treat anchors inside alert/modals.
     *
     * Attaches a click handler to every anchor so links open in the system
     * browser instead of navigating inside the app's webview.
     *
     * @param container The HTMLElement that can contain anchors.
     */
    treatAnchors(container: HTMLElement): void {
        const anchors = Array.from(container.querySelectorAll('a'));

        anchors.forEach((anchor) => {
            anchor.addEventListener('click', (event) => {
                if (event.defaultPrevented) {
                    // Some other handler already dealt with this click. Stop.
                    return;
                }

                const href = anchor.getAttribute('href');
                if (href) {
                    // Take over the navigation.
                    event.preventDefault();
                    event.stopPropagation();

                    // We cannot use CoreDomUtilsProvider.openInBrowser due to circular dependencies.
                    if (this.appProvider.isDesktop()) {
                        // It's a desktop app, use Electron shell library to open the browser.
                        const shell = require('electron').shell;
                        if (!shell.openExternal(href)) {
                            // Open browser failed, open a new window in the app.
                            window.open(href, '_system');
                        }
                    } else {
                        // Mobile/web: '_system' delegates to the system browser.
                        window.open(href, '_system');
                    }
                }
            });
        });
    }
/**
* View an image in a modal.
*
* @param image URL of the image.
* @param title Title of the page or modal.
* @param component Component to link the image to if needed.
* @param componentId An ID to use in conjunction with the component.
* @param fullScreen Whether the modal should be full screen.
*/
viewImage(image: string, title?: string, component?: string, componentId?: string | number, fullScreen?: boolean): void {
if (image) {
const params: any = {
title: title,
image: image,
component: component,
componentId: componentId,
};
const options = fullScreen ? { cssClass: 'core-modal-fullscreen' } : {};
const modal = this.modalCtrl.create('CoreViewerImagePage', params, options);
modal.present();
}
}
/**
* Wait for images to load.
*
* @param element The element to search in.
* @return Promise resolved with a boolean: whether there was any image to load.
*/
waitForImages(element: HTMLElement): Promise<boolean> {
const imgs = Array.from(element.querySelectorAll('img')),
promises = [];
let hasImgToLoad = false;
imgs.forEach((img) => {
if (img && !img.complete) {
hasImgToLoad = true;
// Wait for image to load or fail.
promises.push(new Promise((resolve, reject): void => {
const imgLoaded = (): void => {
resolve();
img.removeEventListener('load', imgLoaded);
img.removeEventListener('error', imgLoaded);
};
img.addEventListener('load', imgLoaded);
img.addEventListener('error', imgLoaded);
}));
}
});
return Promise.all(promises).then(() => {
return hasImgToLoad;
});
}
/**
* Wrap an HTMLElement with another element.
*
* @param el The element to wrap.
* @param wrapper Wrapper.
*/
wrapElement(el: HTMLElement, wrapper: HTMLElement): void {
// Insert the wrapper before the element.
el.parentNode.insertBefore(wrapper, el);
// Now move the element into the wrapper.
wrapper.appendChild(el);
}
/**
* Trigger form cancelled event.
*
* @param form Form element.
* @param siteId The site affected. If not provided, no site affected.
*/
triggerFormCancelledEvent(formRef: ElementRef, siteId?: string): void {
if (!formRef) {
return;
}
this.eventsProvider.trigger(CoreEventsProvider.FORM_ACTION, {
action: 'cancel',
form: formRef.nativeElement,
}, siteId);
}
/**
* Trigger form submitted event.
*
* @param form Form element.
* @param online Whether the action was done in offline or not.
* @param siteId The site affected. If not provided, no site affected.
*/
triggerFormSubmittedEvent(formRef: ElementRef, online?: boolean, siteId?: string): void {
if (!formRef) {
return;
}
this.eventsProvider.trigger(CoreEventsProvider.FORM_ACTION, {
action: 'submit',
form: formRef.nativeElement,
online: !!online,
}, siteId);
}
}<|fim▁end|>
|
}
return this.translate.instant('core.course.availablespace', {available: availableSize});
}
|
<|file_name|>script_lol.cpp<|end_file_name|><|fim▁begin|>/* ScummVM - Graphic Adventure Engine
*
* ScummVM is the legal property of its developers, whose names
* are too numerous to list here. Please refer to the COPYRIGHT
* file distributed with this source distribution.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $URL$
* $Id$
*
*/
#ifdef ENABLE_LOL
#include "kyra/lol.h"
#include "kyra/screen_lol.h"
#include "kyra/timer.h"
#include "kyra/resource.h"
#include "common/endian.h"
namespace Kyra {
void LoLEngine::runInitScript(const char *filename, int optionalFunc) {
_suspendScript = true;
EMCData scriptData;
EMCState scriptState;
memset(&scriptData, 0, sizeof(EMCData));
_emc->unload(&_scriptData);
_emc->load(filename, &scriptData, &_opcodes);
_emc->init(&scriptState, &scriptData);
_emc->start(&scriptState, 0);
while (_emc->isValid(&scriptState))
_emc->run(&scriptState);
if (optionalFunc) {
_emc->init(&scriptState, &scriptData);
_emc->start(&scriptState, optionalFunc);
while (_emc->isValid(&scriptState))
_emc->run(&scriptState);
}
_emc->unload(&scriptData);
_suspendScript = false;
}
// Loads a level (INF) script into _scriptData and runs its entry
// function 0x400 with all trigger flags (-1) set.
void LoLEngine::runInfScript(const char *filename) {
	_emc->unload(&_scriptData);
	_emc->load(filename, &_scriptData, &_opcodes);
	runLevelScript(0x400, -1);
}
// Convenience wrapper: run a level script block with default arguments
// (no character, no item, zeroed registers).
void LoLEngine::runLevelScript(int block, int flags) {
	runLevelScriptCustom(block, flags, -1, 0, 0, 0);
}
// Runs the level script function for the given block, passing the arguments
// through the script's registers. Always checks afterwards whether the scene
// needs a redraw, even when the script itself was skipped.
void LoLEngine::runLevelScriptCustom(int block, int flags, int charNum, int item, int reg3, int reg4) {
	EMCState scriptState;
	memset(&scriptState, 0, sizeof(EMCState));

	// Scripts are skipped entirely while suspended (see runInitScript()).
	if (!_suspendScript) {
		_emc->init(&scriptState, &_scriptData);
		_emc->start(&scriptState, block);

		// Hand the arguments to the script via its registers.
		scriptState.regs[0] = flags;
		scriptState.regs[1] = charNum;
		scriptState.regs[2] = item;
		scriptState.regs[3] = reg3;
		scriptState.regs[4] = reg4;
		scriptState.regs[5] = block;
		scriptState.regs[6] = _scriptDirection;

		if (_emc->isValid(&scriptState)) {
			// The word just before the entry point appears to hold the trigger mask
			// for this function; only execute if it intersects the requested flags.
			if (*(scriptState.ip - 1) & flags) {
				while (_emc->isValid(&scriptState))
					_emc->run(&scriptState);
			}
		}
	}

	checkSceneUpdateNeed(block);
}
// Marks the scene for redrawing if the given block is the current block or
// one of the (up to 15) currently visible blocks.
bool LoLEngine::checkSceneUpdateNeed(int func) {
	if (_sceneUpdateRequired)
		return true;

	bool required = (_currentBlock == func);
	for (int i = 0; i < 15 && !required; i++)
		required = (_visibleBlockIndex[i] == func);

	if (required)
		_sceneUpdateRequired = true;

	return required;
}
int LoLEngine::olol_setWallType(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setWallType(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));

	const int block = stackPos(0);
	const int wall = stackPos(1);
	const int wallType = stackPos(2);

	// If the new wall type has flag 4 set, remove all monsters from the block first.
	if (wallType != -1 && (_wllWallFlags[wallType] & 4))
		deleteMonstersFromBlock(block);

	setWallType(block, wall, wallType);
	return 1;
}
int LoLEngine::olol_getWallType(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getWallType(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));

	const int block = stackPos(0);
	const int wall = stackPos(1) & 3;

	return _levelBlockProperties[block].walls[wall];
}
int LoLEngine::olol_drawScene(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_drawScene(%p) (%d)", (const void *)script, stackPos(0));

	const int arg0 = stackPos(0);
	drawScene(arg0);
	return 1;
}
int LoLEngine::olol_rollDice(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_rollDice(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));

	const int arg0 = stackPos(0);
	const int arg1 = stackPos(1);

	return rollDice(arg0, arg1);
}
// Moves or turns the party, emulating a click on the corresponding arrow
// button. Modes 0-5 are absolute button actions, modes 6-9 are converted to
// an action relative to the current facing, and modes 10-13 turn the party
// to face an absolute direction via the shorter rotation.
int LoLEngine::olol_moveParty(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_moveParty(%p) (%d)", (const void *)script, stackPos(0));
	int mode = stackPos(0);

	// Modes 6..9: rebase onto 0..3 relative to the current facing direction.
	if (mode > 5 && mode < 10)
		mode = (mode - 6 - _currentDirection) & 3;

	Button b;
	b.data0Val2 = b.data1Val2 = b.data2Val2 = 0xfe;
	b.data0Val3 = b.data1Val3 = b.data2Val3 = 0x01;

	switch (mode) {
	case 0:
		clickedUpArrow(&b);
		break;

	case 1:
		clickedRightArrow(&b);
		break;

	case 2:
		clickedDownArrow(&b);
		break;

	case 3:
		clickedLeftArrow(&b);
		break;

	case 4:
		clickedTurnLeftArrow(&b);
		break;

	case 5:
		clickedTurnRightArrow(&b);
		break;

	case 10:
	case 11:
	case 12:
	case 13:
		// Turn to absolute direction (mode - 10). The difference to the current
		// direction is normalized to -1..2, then executed as single turns.
		mode = ABS(mode - 10 - _currentDirection);
		if (mode > 2)
			mode = (mode ^ 2) * -1;

		while (mode) {
			if (mode > 0) {
				clickedTurnRightArrow(&b);
				mode--;
			} else {
				clickedTurnLeftArrow(&b);
				mode++;
			}
		}
		break;

	default:
		break;
	}

	return 1;
}
int LoLEngine::olol_delay(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_delay(%p) (%d)", (const void *)script, stackPos(0));

	// The script argument is given in engine ticks.
	const int ticks = stackPos(0);
	delay(ticks * _tickLength, true);
	return 1;
}
int LoLEngine::olol_setGameFlag(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setGameFlag(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));

	const int flag = stackPos(0);

	// The second argument selects between setting and clearing the flag.
	if (stackPos(1))
		setGameFlag(flag);
	else
		resetGameFlag(flag);

	return 1;
}
int LoLEngine::olol_testGameFlag(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_testGameFlag(%p) (%d)", (const void *)script, stackPos(0));

	const int flag = stackPos(0);

	// Negative flag indices are invalid and always report "not set".
	return (flag < 0) ? 0 : queryGameFlag(flag);
}
// Loads the level graphics files. Arguments 3 and 4 are passed through as
// unsigned 16-bit values unless they are -1; argument 5 is an optional
// file name string (-1 means "none").
int LoLEngine::olol_loadLevelGraphics(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadLevelGraphics(%p) (%s, %d, %d, %d, %d, %d)", (const void *)script, stackPosString(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));
	loadLevelGraphics(stackPosString(0), stackPos(1), stackPos(2), stackPos(3) == -1 ? -1 : (uint16)stackPos(3), stackPos(4) == -1 ? -1 : (uint16)stackPos(4), (stackPos(5) == -1) ? 0 : stackPosString(5));
	return 1;
}
int LoLEngine::olol_loadBlockProperties(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadBlockProperties(%p) (%s)", (const void *)script, stackPosString(0));

	const char *file = stackPosString(0);
	loadBlockProperties(file);
	return 1;
}
int LoLEngine::olol_loadMonsterShapes(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadMonsterShapes(%p) (%s, %d, %d)", (const void *)script, stackPosString(0), stackPos(1), stackPos(2));

	const char *file = stackPosString(0);
	const int arg1 = stackPos(1);
	const int arg2 = stackPos(2);

	loadMonsterShapes(file, arg1, arg2);
	return 1;
}
int LoLEngine::olol_deleteHandItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_deleteHandItem(%p) ()", (const void *)script);

	// Remember the deleted item so it can be returned to the script.
	const int deletedItem = _itemInHand;

	deleteItem(deletedItem);
	setHandItem(0);

	return deletedItem;
}
int LoLEngine::olol_allocItemPropertiesBuffer(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_allocItemPropertiesBuffer(%p) (%d)", (const void *)script, stackPos(0));

	const int numEntries = stackPos(0);

	// Replace any previously allocated buffer.
	delete[] _itemProperties;
	_itemProperties = new ItemProperty[numEntries];

	return 1;
}
// Fills one ItemProperty entry from the script arguments (index in arg 0,
// the individual fields in args 1-9).
int LoLEngine::olol_setItemProperty(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setItemProperty(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9));
	ItemProperty *tmp = &_itemProperties[stackPos(0)];

	tmp->nameStringId = stackPos(1);
	tmp->shpIndex = stackPos(2);
	tmp->type = stackPos(3);

	// WORKAROUND for unpatched early floppy versions.
	// The Vaelan's cube should not be able to be equipped in a weapon slot.
	if (stackPos(0) == 264 && tmp->type == 5)
		tmp->type = 0;

	tmp->itemScriptFunc = stackPos(4);
	tmp->might = stackPos(5);
	tmp->skill = stackPos(6);
	tmp->protection = stackPos(7);
	tmp->flags = stackPos(8);
	tmp->unkB = stackPos(9);
	return 1;
}
int LoLEngine::olol_makeItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_makeItem(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));

	const int arg0 = stackPos(0);
	const int arg1 = stackPos(1);
	const int arg2 = stackPos(2);

	return makeItem(arg0, arg1, arg2);
}
// Places or moves a level item. The x/y coordinates (args 3 and 4) are
// masked to 8 bits before being passed on.
int LoLEngine::olol_placeMoveLevelItem(EMCState *script) {
	// Fixed: the debug trace previously reported the wrong function name ("olol_setItemProperty").
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_placeMoveLevelItem(%p) (%d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));
	placeMoveLevelItem(stackPos(0), stackPos(1), stackPos(2), stackPos(3) & 0xff, stackPos(4) & 0xff, stackPos(5));
	return 1;
}
// Creates a new item and, if creation succeeded, places it into the level.
// Returns the new item index, or -1 if the item could not be created.
int LoLEngine::olol_createLevelItem(EMCState *script) {
	// Fixed: the debug trace previously reported the wrong function name ("olol_setItemProperty").
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_createLevelItem(%p) (%d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7));
	int item = makeItem(stackPos(0), stackPos(1), stackPos(2));
	if (item == -1)
		return item;

	placeMoveLevelItem(item, stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7));
	return item;
}
// Returns one property of an in-play item (arg 0), selected by arg 1.
// Selectors 0-5 and 15 read the item instance, the rest read its item
// property template. Returns -1 for unknown selectors, 0 for item 0.
int LoLEngine::olol_getItemPara(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getItemPara(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));

	if (!stackPos(0))
		return 0;

	ItemInPlay *i = &_itemsInPlay[stackPos(0)];
	ItemProperty *p = &_itemProperties[i->itemPropertyIndex];

	switch (stackPos(1)) {
	case 0:
		return i->block;
	case 1:
		return i->x;
	case 2:
		return i->y;
	case 3:
		return i->level;
	case 4:
		return i->itemPropertyIndex;
	case 5:
		return i->shpCurFrame_flg;
	case 6:
		return p->nameStringId;
	case 7:
		// Selector 7 intentionally falls through to the -1 return.
		break;
	case 8:
		return p->shpIndex;
	case 9:
		return p->type;
	case 10:
		return p->itemScriptFunc;
	case 11:
		return p->might;
	case 12:
		return p->skill;
	case 13:
		return p->protection;
	case 14:
		return p->unkB;
	case 15:
		// Frame number without the flag bits.
		return i->shpCurFrame_flg & 0x1fff;
	case 16:
		return p->flags;
	case 17:
		// Skill and might packed into one 16-bit value.
		return (p->skill << 8) | ((uint8)p->might);
	default:
		break;
	}

	return -1;
}
// Returns one stat of a party character (arg 0). Arg 1 selects the stat,
// arg 2 is an extra index used by the array-valued stats (items, skills,
// protection, might). Unknown selectors return 0.
int LoLEngine::olol_getCharacterStat(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getCharacterStat(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	LoLCharacter *c = &_characters[stackPos(0)];
	int d = stackPos(2);

	switch (stackPos(1)) {
	case 0:
		return c->flags;

	case 1:
		return c->raceClassSex;

	case 5:
		return c->hitPointsCur;

	case 6:
		return c->hitPointsMax;

	case 7:
		return c->magicPointsCur;

	case 8:
		return c->magicPointsMax;

	case 9:
		return c->itemProtection;

	case 10:
		return c->items[d];

	case 11:
		// Effective skill: base level plus modifier.
		return c->skillLevels[d] + c->skillModifiers[d];

	case 12:
		return c->protectionAgainstItems[d];

	case 13:
		// Index with bit 7 set selects the last (7th) might slot.
		return (d & 0x80) ? c->itemsMight[7] : c->itemsMight[d];

	case 14:
		return c->skillModifiers[d];

	case 15:
		return c->id;

	default:
		break;
	}

	return 0;
}
// Sets one stat of a party character (arg 0). Arg 1 selects the stat,
// arg 2 is an extra index for array-valued stats, arg 3 is the new value.
int LoLEngine::olol_setCharacterStat(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setCharacterStat(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
	LoLCharacter *c = &_characters[stackPos(0)];
	int d = stackPos(2);
	int e = stackPos(3);

	switch (stackPos(1)) {
	case 0:
		c->flags = e;
		break;

	case 1:
		// Only the low nibble is valid for race/class/sex.
		c->raceClassSex = e & 0x0f;
		break;

	case 5:
		// Current HP goes through the helper (mode 0) so side effects apply.
		setCharacterMagicOrHitPoints(stackPos(0), 0, e, 0);
		break;

	case 6:
		c->hitPointsMax = e;
		break;

	case 7:
		// Current MP goes through the helper (mode 1).
		setCharacterMagicOrHitPoints(stackPos(0), 1, e, 0);
		break;

	case 8:
		c->magicPointsMax = e;
		break;

	case 9:
		c->itemProtection = e;
		break;

	case 10:
		// Note: the value argument is ignored here; the item slot is always cleared.
		c->items[d] = 0;
		break;

	case 11:
		c->skillLevels[d] = e;
		break;

	case 12:
		c->protectionAgainstItems[d] = e;
		break;

	case 13:
		// Index with bit 7 set selects the last (7th) might slot.
		if (d & 0x80)
			c->itemsMight[7] = e;
		else
			c->itemsMight[d] = e;
		break;

	case 14:
		c->skillModifiers[d] = e;
		break;

	default:
		break;
	}

	return 0;
}
int LoLEngine::olol_loadLevelShapes(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadLevelShapes(%p) (%s, %s)", (const void *)script, stackPosString(0), stackPosString(1));

	const char *shpFile = stackPosString(0);
	const char *datFile = stackPosString(1);

	loadLevelShpDat(shpFile, datFile, true);
	return 1;
}
// Closes the level shape file and resets the handle so later code can
// detect that no file is open.
int LoLEngine::olol_closeLevelShapeFile(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_closeLevelShapeFile(%p) ()", (const void *)script);
	delete _lvlShpFileHandle;
	_lvlShpFileHandle = 0;
	return 1;
}
// Loads the two door shapes from the given bitmap (shape indices in args 1
// and 2) and updates the wall flags for the door wall types. Args 3 and 4
// optionally clear flag 2 on two further wall-type ranges.
int LoLEngine::olol_loadDoorShapes(EMCState *script) {
	// Fixed: the debug trace previously omitted arguments 3 and 4, which this function uses.
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadDoorShapes(%p) (%s, %d, %d, %d, %d)", (const void *)script, stackPosString(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	_screen->loadBitmap(stackPosString(0), 3, 3, 0);
	const uint8 *p = _screen->getCPagePtr(2);

	// delete[] on a null pointer is a no-op, so no null checks are needed.
	delete[] _doorShapes[0];
	_doorShapes[0] = _screen->makeShapeCopy(p, stackPos(1));
	delete[] _doorShapes[1];
	_doorShapes[1] = _screen->makeShapeCopy(p, stackPos(2));

	for (int i = 0; i < 20; i++) {
		_wllWallFlags[i + 3] |= 7;
		int t = i % 5;
		if (t == 4)
			_wllWallFlags[i + 3] &= 0xf8;
		if (t == 3)
			_wllWallFlags[i + 3] &= 0xfd;
	}

	if (stackPos(3)) {
		for (int i = 3; i < 13; i++)
			_wllWallFlags[i] &= 0xfd;
	}

	if (stackPos(4)) {
		for (int i = 13; i < 23; i++)
			_wllWallFlags[i] &= 0xfd;
	}

	return 1;
}
int LoLEngine::olol_initAnimStruct(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_initAnimStruct(%p) (%s, %d, %d, %d, %d, %d)", (const void *)script, stackPosString(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));

	// Report success (1) or failure (0) back to the script.
	return _tim->initAnimStruct(stackPos(1), stackPosString(0), stackPos(2), stackPos(3), stackPos(4), 0, stackPos(5)) ? 1 : 0;
}
int LoLEngine::olol_playAnimationPart(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playAnimationPart(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));

	const int arg0 = stackPos(0);
	const int arg1 = stackPos(1);
	const int arg2 = stackPos(2);
	const int arg3 = stackPos(3);

	_animator->playPart(arg0, arg1, arg2, arg3);
	return 1;
}
int LoLEngine::olol_freeAnimStruct(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_freeAnimStruct(%p) (%d)", (const void *)script, stackPos(0));

	// Report success (1) or failure (0) back to the script.
	return _tim->freeAnimStruct(stackPos(0)) ? 1 : 0;
}
// Returns the party's current facing direction.
int LoLEngine::olol_getDirection(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getDirection(%p)", (const void *)script);
	return _currentDirection;
}
// Plays a surprise sound effect for the first active party member with a
// negative id (the sfx depends on that id); returns immediately after the
// first match. Always returns 1.
int LoLEngine::olol_characterSurpriseFeedback(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_characterSurpriseFeedback(%p)", (const void *)script);

	for (int i = 0; i < 4; i++) {
		// Skip inactive characters (flag bit 0 clear) and non-negative ids.
		if (!(_characters[i].flags & 1) || _characters[i].id >= 0)
			continue;

		// Map the (negated) character id to its sound effect; 0 = no sfx.
		int s = -_characters[i].id;
		int sfx = (s == 1) ? 136 : ((s == 5) ? 50 : ((s == 8) ? 49 : ((s == 9) ? 48 : 0)));
		if (sfx)
			snd_playSoundEffect(sfx, -1);

		return 1;
	}

	return 1;
}
// Stores the requested music theme; only updates the state, doesn't start
// playback here.
int LoLEngine::olol_setMusicTrack(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setMusicTrack(%p) (%d)", (const void *)script, stackPos(0));
	_curMusicTheme = stackPos(0);
	return 1;
}
int LoLEngine::olol_setSequenceButtons(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setSequenceButtons(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));

	const int arg0 = stackPos(0);
	const int arg1 = stackPos(1);
	const int arg2 = stackPos(2);
	const int arg3 = stackPos(3);
	const int arg4 = stackPos(4);

	setSequenceButtons(arg0, arg1, arg2, arg3, arg4);
	return 1;
}
// Restores the default button layout/state after a sequence.
int LoLEngine::olol_setDefaultButtonState(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setDefaultButtonState(%p)", (const void *)script);
	setDefaultButtonState();
	return 1;
}
int LoLEngine::olol_checkRectForMousePointer(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkRectForMousePointer(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));

	// 1 if the mouse cursor lies within the given rectangle, 0 otherwise.
	if (posWithinRect(_mouseX, _mouseY, stackPos(0), stackPos(1), stackPos(2), stackPos(3)))
		return 1;

	return 0;
}
// Clears the dialogue text field (screen dim 5 / text dim 4) unless the
// control mode suppresses text display.
int LoLEngine::olol_clearDialogueField(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_clearDialogueField(%p) (%d)", (const void *)script, stackPos(0));

	if (_currentControlMode && (!textEnabled()))
		return 1;

	_screen->setScreenDim(5);
	const ScreenDim *d = _screen->getScreenDim(5);
	// The 16 color mode needs one extra pixel left out on the right edge.
	_screen->fillRect(d->sx, d->sy, d->sx + d->w - (_flags.use16ColorMode ? 3 : 2), d->sy + d->h - 2, d->unkA);
	_txt->clearDim(4);
	_txt->resetDimTextPositions(4);

	return 1;
}
// Script opcode: configure one part of a background (scene) animation;
// all ten script arguments are forwarded to the animator unchanged.
int LoLEngine::olol_setupBackgroundAnimationPart(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setupBackgroundAnimationPart(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9));
	_animator->setupPart(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9));
	return 0;
}
// Script opcode: start a background animation (id, parameter forwarded).
int LoLEngine::olol_startBackgroundAnimation(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_startBackgroundAnimation(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	_animator->start(stackPos(0), stackPos(1));
	return 1;
}
// Script opcode: fade the screen to black. The script argument is logged but
// not used; the fade always runs with a fixed delay of 10.
int LoLEngine::olol_fadeToBlack(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_fadeToBlack(%p) (%d)", (const void *)script, stackPos(0));
	_screen->fadeToBlack(10);
	return 1;
}
// Script opcode: fade to the staging palette (slot 3), or in 16-color mode
// reapply brightness/lamp settings to palette 0 instead. Clears the fade flag.
int LoLEngine::olol_fadePalette(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_fadePalette(%p)", (const void *)script);
	if (_flags.use16ColorMode)
		setPaletteBrightness(_screen->getPalette(0), _brightness, _lampEffect);
	else
		_screen->fadePalette(_screen->getPalette(3), 10);
	_screen->_fadeFlag = 0;
	return 1;
}
// Script opcode: load a bitmap file into page 3 (palette into slot 3) and,
// unless the destination argument is 2, copy it to the requested page.
int LoLEngine::olol_loadBitmap(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadBitmap(%p) (%s, %d)", (const void *)script, stackPosString(0), stackPos(1));
	_screen->loadBitmap(stackPosString(0), 3, 3, &_screen->getPalette(3));
	if (stackPos(1) != 2)
		_screen->copyPage(3, stackPos(1));
	return 1;
}
// Script opcode: stop the background animation with the given id.
int LoLEngine::olol_stopBackgroundAnimation(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_stopBackgroundAnimation(%p) (%d)", (const void *)script, stackPos(0));
	_animator->stop(stackPos(0));
	return 1;
}
// Script opcode: read one of the 24 global script variables.
// Fix: the original assert only checked the upper bound; a negative index
// from a broken script would read out of bounds unchecked.
int LoLEngine::olol_getGlobalScriptVar(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getGlobalScriptVar(%p) (%d)", (const void *)script, stackPos(0));
	assert(stackPos(0) >= 0 && stackPos(0) < 24);
	return _globalScriptVars[stackPos(0)];
}
// Script opcode: write one of the 24 global script variables.
// Fix: the original assert only checked the upper bound; a negative index
// from a broken script would write out of bounds unchecked.
int LoLEngine::olol_setGlobalScriptVar(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setGlobalScriptVar(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	assert(stackPos(0) >= 0 && stackPos(0) < 24);
	_globalScriptVars[stackPos(0)] = stackPos(1);
	return 1;
}
// Script opcode: read one of the engine-level game state variables selected
// by the first argument; the second argument is an index used only by case 6.
// Unknown selectors (incl. 7) return 0.
int LoLEngine::olol_getGlobalVar(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getGlobalVar(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	switch (stackPos(0)) {
	case 0:
		return _currentBlock;
	case 1:
		return _currentDirection;
	case 2:
		return _currentLevel;
	case 3:
		return _itemInHand;
	case 4:
		return _brightness;
	case 5:
		return _credits;
	case 6:
		return _globalScriptVars2[stackPos(1)];
	case 8:
		return _updateFlags;
	case 9:
		return _lampOilStatus;
	case 10:
		return _sceneDefaultUpdate;
	case 11:
		return _compassBroken;
	case 12:
		return _drainMagic;
	case 13:
		// Speech volume is exposed to scripts rebased by -2.
		return getVolume(kVolumeSpeech) - 2;
	default:
		break;
	}
	return 0;
}
// Script opcode: write one of the engine-level game state variables.
// Arguments: selector, a (aux/index), b (value). Most cases consume b;
// case 6 uses a as index into _globalScriptVars2.
int LoLEngine::olol_setGlobalVar(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setGlobalVar(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	uint16 a = stackPos(1);
	uint16 b = stackPos(2);
	switch (stackPos(0)) {
	case 0:
		// Moving the party block recalculates exact coordinates (block
		// center offsets 0x80/0x80) and updates the automap.
		_currentBlock = b;
		calcCoordinates(_partyPosX, _partyPosY, _currentBlock, 0x80, 0x80);
		updateAutoMap(_currentBlock);
		break;
	case 1:
		_currentDirection = b;
		break;
	case 2:
		_currentLevel = b & 0xff;
		break;
	case 3:
		setHandItem(b);
		break;
	case 4:
		_brightness = b & 0xff;
		break;
	case 5:
		_credits = b;
		break;
	case 6:
		_globalScriptVars2[a] = b;
		break;
	case 7:
		// Intentionally unused selector.
		break;
	case 8:
		_updateFlags = b;
		if (b == 1) {
			// Entering "frozen" update mode: finish portrait animations
			// if text is off, then suspend system timer 2.
			if (!textEnabled() || (!(_currentControlMode & 2)))
				timerUpdatePortraitAnimations(1);
			disableSysTimer(2);
		} else {
			enableSysTimer(2);
		}
		break;
	case 9:
		_lampOilStatus = b & 0xff;
		break;
	case 10:
		_sceneDefaultUpdate = b & 0xff;
		gui_toggleButtonDisplayMode(0, 0);
		break;
	case 11:
		// NOTE(review): cases 11 and 12 read argument a, not b, unlike the
		// other cases - presumably matching the original interpreter; verify
		// against the disassembly before changing.
		_compassBroken = a & 0xff;
		break;
	case 12:
		_drainMagic = a & 0xff;
		break;
	default:
		break;
	}
	return 1;
}
// Script opcode: trigger the door switch mechanism for a block.
int LoLEngine::olol_triggerDoorSwitch(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_triggerDoorSwitch(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	processDoorSwitch(stackPos(0)/*, (_wllWallFlags[_levelBlockProperties[stackPos(0)].walls[0]] & 8) ? 0 : 1*/, stackPos(1));
	return 1;
}
// Script opcode: return 1 if any enabled party member carries an item whose
// item script function is 0 or 2 in the first four item slots, else 0.
int LoLEngine::olol_checkEquippedItemScriptFlags(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkEquippedItemScriptFlags(%p)", (const void *)script);
	for (int i = 0; i < 4; i++) {
		if (!(_characters[i].flags & 1))
			continue;
		for (int ii = 0; ii < 4; ii++) {
			uint8 f = _itemProperties[_itemsInPlay[_characters[i].items[ii]].itemPropertyIndex].itemScriptFunc;
			if (f == 0 || f == 2)
				return 1;
		}
	}
	return 0;
}
// Script opcode: set or clear a door state bit on a level block.
// When set: clears bit 0x10 and sets bit 0x20; when cleared: clears 0x20.
int LoLEngine::olol_setDoorState(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setDoorState(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	if (stackPos(1))
		_levelBlockProperties[stackPos(0)].flags = (_levelBlockProperties[stackPos(0)].flags & 0xef) | 0x20;
	else
		_levelBlockProperties[stackPos(0)].flags &= 0xdf;
	return 1;
}
// Script opcode: toggle a wall type between two alternatives - if the wall
// currently equals arg2 it becomes arg3, otherwise arg2. A wall index of -1
// samples wall 0 for the comparison.
int LoLEngine::olol_updateBlockAnimations(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_updateBlockAnimations(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
	int block = stackPos(0);
	int wall = stackPos(1);
	setWallType(block, wall, _levelBlockProperties[block].walls[(wall == -1) ? 0 : wall] == stackPos(2) ? stackPos(3) : stackPos(2));
	return 0;
}
// Script opcode: assign level shapes for the given index; forwards the result.
int LoLEngine::olol_mapShapeToBlock(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_mapShapeToBlock(%p) (%d)", (const void *)script, stackPos(0));
	return assignLevelShapes(stackPos(0));
}
// Script opcode: fill both 5-entry ranges of the wall shape map (indices
// 3..7 and 13..17) with the low byte of the script argument.
int LoLEngine::olol_resetBlockShapeAssignment(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_resetBlockShapeAssignment(%p) (%d)", (const void *)script, stackPos(0));
	uint8 fill = stackPos(0) & 0xff;
	memset(_wllShapeMap + 3, fill, 5);
	memset(_wllShapeMap + 13, fill, 5);
	return 1;
}
// Script opcode: copy a screen region between pages; updates the visible
// screen only when the destination page argument is 0.
int LoLEngine::olol_copyRegion(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_copyRegion(%p) (%d, %d, %d, %d, %d, %d, %d, %d)",
		(const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7));
	_screen->copyRegion(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), Screen::CR_NO_P_CHECK);
	if (!stackPos(7))
		_screen->updateScreen();
	return 1;
}
// Script opcode: spawn a monster at the given block/offset, using the first
// free slot (dead and not mode 13). Returns the slot index, or -1 if the
// position is blocked or all 30 slots are in use.
int LoLEngine::olol_initMonster(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_initMonster(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script,
		stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9), stackPos(10));
	uint16 x = 0;
	uint16 y = 0;
	calcCoordinates(x, y, stackPos(0), stackPos(1), stackPos(2));
	uint16 w = _monsterProperties[stackPos(4)].maxWidth;
	// Refuse to spawn if the target position is occupied.
	if (checkBlockBeforeObjectPlacement(x, y, w, 7, 7))
		return -1;
	for (uint8 i = 0; i < 30; i++) {
		MonsterInPlay *l = &_monsters[i];
		if (l->hitPoints || l->mode == 13)
			continue;
		memset(l, 0, sizeof(MonsterInPlay));
		l->id = i;
		l->x = x;
		l->y = y;
		l->facing = stackPos(3);
		l->type = stackPos(4);
		l->properties = &_monsterProperties[l->type];
		l->direction = l->facing << 1;
		// Hit points are scaled by the difficulty modifier (8.8 fixed point).
		l->hitPoints = (l->properties->hitPoints * _monsterModifiers[_monsterDifficulty]) >> 8;
		// Level 12, type 2: randomize hit points to 75%..125%.
		if (_currentLevel == 12 && l->type == 2)
			l->hitPoints = (l->hitPoints * (rollDice(1, 128) + 192)) >> 8;
		l->numDistAttacks = l->properties->numDistAttacks;
		l->distAttackTick = rollDice(1, calcMonsterSkillLevel(l->id | 0x8000, 8)) - 1;
		l->flyingHeight = 2;
		l->flags = stackPos(5);
		l->assignedItems = 0;
		setMonsterMode(l, stackPos(6));
		placeMonster(l, l->x, l->y);
		l->destX = l->x;
		l->destY = l->y;
		l->destDirection = l->direction;
		// Arguments 7..10 carry the four equipment shape ids.
		for (int ii = 0; ii < 4; ii++)
			l->equipmentShapes[ii] = stackPos(7 + ii);
		checkSceneUpdateNeed(l->block);
		return i;
	}
	return -1;
}
// Script opcode: fade out and clear the 3D scene window (fixed delay 10).
int LoLEngine::olol_fadeClearSceneWindow(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_fadeClearSceneWindow(%p)", (const void *)script);
	_screen->fadeClearSceneWindow(10);
	return 1;
}
// Script opcode: fade towards the sequence palette. In 256-color mode the
// upper half of palette 0 is staged into slot 3 (plus special colors) before
// fading; 16-color mode just reapplies brightness/lamp settings.
int LoLEngine::olol_fadeSequencePalette(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_fadeSequencePalette(%p)", (const void *)script);
	if (_flags.use16ColorMode) {
		setPaletteBrightness(_screen->getPalette(0), _brightness, _lampEffect);
	} else {
		_screen->getPalette(3).copy(_screen->getPalette(0), 128);
		_screen->loadSpecialColors(_screen->getPalette(3));
		_screen->fadePalette(_screen->getPalette(3), 10);
	}
	_screen->_fadeFlag = 0;
	return 1;
}
// Script opcode: redraw the entire play field, clearing the scene window
// first unless a fade-out already did (fade flag 2).
int LoLEngine::olol_redrawPlayfield(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_redrawPlayfield(%p)", (const void *)script);
	if (_screen->_fadeFlag != 2)
		_screen->fadeClearSceneWindow(10);
	gui_drawPlayField();
	setPaletteBrightness(_screen->getPalette(0), _brightness, _lampEffect);
	_screen->_fadeFlag = 0;
	return 1;
}
// Script opcode: transition to a new level. Ends in-flight thrown objects,
// finishes door animations, saves temp data for the old level, positions the
// party and loads the level. Resets the script instruction pointer to 0 so
// the calling script restarts.
int LoLEngine::olol_loadNewLevel(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadNewLevel(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	_screen->fadeClearSceneWindow(10);
	_screen->fillRect(112, 0, 288, 120, 0);
	disableSysTimer(2);
	for (int i = 0; i < 8; i++) {
		if (!_flyingObjects[i].enable || _flyingObjects[i].objectType)
			continue;
		// Drop thrown items (objectType 0) where they currently are.
		endObjectFlight(&_flyingObjects[i], _flyingObjects[i].x, _flyingObjects[i].y, 1);
	}
	completeDoorOperations();
	generateTempData();
	_currentBlock = stackPos(1);
	_currentDirection = stackPos(2);
	calcCoordinates(_partyPosX, _partyPosY, _currentBlock, 0x80, 0x80);
	loadLevel(stackPos(0));
	enableSysTimer(2);
	script->ip = 0;
	return 1;
}
// Script opcode: thin wrapper returning the nearest monster to a character.
int LoLEngine::olol_getNearestMonsterFromCharacter(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getNearestMonsterFromCharacter(%p) (%d)", (const void *)script, stackPos(0));
	return getNearestMonsterFromCharacter(stackPos(0));
}
// Unused script opcode placeholder; always returns 0.
int LoLEngine::olol_dummy0(EMCState *script) {
	return 0;
}
// Script opcode: fill in one MonsterProperty record from 42 script arguments.
// Percentages are converted to 8.8 fixed point ((v << 8) / 100). maxWidth is
// derived from the widest of the monster's 16 shapes.
int LoLEngine::olol_loadMonsterProperties(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadMonsterProperties(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d, %d)",
		(const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5),
		stackPos(6), stackPos(7), stackPos(8), stackPos(9), stackPos(10), stackPos(11), stackPos(12), stackPos(13),
		stackPos(14), stackPos(15), stackPos(16), stackPos(17), stackPos(18), stackPos(19), stackPos(20),
		stackPos(21), stackPos(22), stackPos(23), stackPos(24), stackPos(25), stackPos(26), stackPos(27),
		stackPos(28), stackPos(29), stackPos(30), stackPos(31), stackPos(32), stackPos(33), stackPos(34),
		stackPos(35), stackPos(36), stackPos(37), stackPos(38), stackPos(39), stackPos(40), stackPos(41));
	MonsterProperty *l = &_monsterProperties[stackPos(0)];
	l->shapeIndex = stackPos(1) & 0xff;
	int shpWidthMax = 0;
	// Shape byte 3 holds the frame width; take the maximum over all 16 frames.
	for (int i = 0; i < 16; i++) {
		uint8 m = _monsterShapes[(l->shapeIndex << 4) + i][3];
		if (m > shpWidthMax)
			shpWidthMax = m;
	}
	l->maxWidth = shpWidthMax;
	l->fightingStats[0] = (stackPos(2) << 8) / 100;	// hit chance
	l->fightingStats[1] = 256;	//
	l->fightingStats[2] = (stackPos(3) << 8) / 100;	// protection
	l->fightingStats[3] = stackPos(4);	// evade chance
	l->fightingStats[4] = (stackPos(5) << 8) / 100;	// speed
	l->fightingStats[5] = (stackPos(6) << 8) / 100;	//
	l->fightingStats[6] = (stackPos(7) << 8) / 100;	//
	l->fightingStats[7] = (stackPos(8) << 8) / 100;	//
	l->fightingStats[8] = 0;
	for (int i = 0; i < 8; i++) {
		l->itemsMight[i] = stackPos(9 + i);
		l->protectionAgainstItems[i] = (stackPos(17 + i) << 8) / 100;
	}
	l->itemProtection = stackPos(25);
	l->hitPoints = stackPos(26);
	l->speedTotalWaitTicks = 1;
	l->flags = stackPos(27);
	l->unk5 = stackPos(28);
	// FIXME??? The next line overwrites the value just stored above, so
	// stackPos(28) is effectively discarded - possibly a second field was
	// intended here. Kept as-is to match the original behavior.
	l->unk5 = stackPos(29);
	//
	l->numDistAttacks = stackPos(30);
	l->numDistWeapons = stackPos(31);
	for (int i = 0; i < 3; i++)
		l->distWeapons[i] = stackPos(32 + i);
	l->attackSkillChance = stackPos(35);
	l->attackSkillType = stackPos(36);
	l->defenseSkillChance = stackPos(37);
	l->defenseSkillType = stackPos(38);
	for (int i = 0; i < 3; i++)
		l->sounds[i] = stackPos(39 + i);
	return 1;
}
// Script opcode: thin wrapper around battleHitSkillTest().
int LoLEngine::olol_battleHitSkillTest(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_battleHitSkillTest(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	return battleHitSkillTest(stackPos(0), stackPos(1), stackPos(2));
}
// Script opcode: inflict damage on one target, or on all four party members
// when the target argument is -1.
int LoLEngine::olol_inflictDamage(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_inflictDamage(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	if (stackPos(0) == -1) {
		for (int i = 0; i < 4; i++)
			inflictDamage(i, stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	} else {
		inflictDamage(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	}
	return 1;
}
// Script opcode: give a monster a new destination (block + offsets) and
// facing. Only applies while the monster is in mode 1 or 2; turns it towards
// the destination if it is not already there.
int LoLEngine::olol_moveMonster(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_moveMonster(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	MonsterInPlay *m = &_monsters[stackPos(0)];
	if (m->mode == 1 || m->mode == 2) {
		calcCoordinates(m->destX, m->destY, stackPos(1), stackPos(2), stackPos(3));
		m->destDirection = stackPos(4) << 1;
		if (m->x != m->destX || m->y != m->destY)
			setMonsterDirection(m, calcMonsterDirection(m->x, m->y, m->destX, m->destY));
	}
	return 1;
}
// Script opcode: show a dialogue box; arguments 1-3 are language string ids.
int LoLEngine::olol_dialogueBox(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_dialogueBox(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
	_tim->drawDialogueBox(stackPos(0), getLangString(stackPos(1)), getLangString(stackPos(2)), getLangString(stackPos(3)));
	return 1;
}
// Script opcode: adjust the party's credits. A non-negative argument gives
// that amount; a negative argument takes its absolute value.
int LoLEngine::olol_giveTakeMoney(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_giveTakeMoney(%p) (%d)", (const void *)script, stackPos(0));
	int amount = stackPos(0);
	if (amount < 0)
		takeCredits(-amount, 1);
	else
		giveCredits(amount, 1);
	return 1;
}
// Script opcode: return 1 when the party can afford the requested amount,
// 0 otherwise.
int LoLEngine::olol_checkMoney(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkMoney(%p) (%d)", (const void *)script, stackPos(0));
	const int price = stackPos(0);
	return (_credits >= price) ? 1 : 0;
}
// Script opcode: arm or disarm one of the script timers (ids 0x50 + n).
// A non-zero countdown enables the timer; zero disables it.
int LoLEngine::olol_setScriptTimer(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setScriptTimer(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	uint8 id = 0x50 + stackPos(0);
	if (stackPos(1)) {
		_timer->enable(id);
		_timer->setCountdown(id, stackPos(1));
	} else {
		_timer->disable(id);
	}
	return 1;
}
// Script opcode: create an item and put it into the (empty) hand cursor.
// Fails with 0 if an item is already held.
int LoLEngine::olol_createHandItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_createHandItem(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	if (_itemInHand)
		return 0;
	setHandItem(makeItem(stackPos(0), stackPos(1), stackPos(2)));
	return 1;
}
// Script opcode: play the attack sound for an item. Items outside the listed
// id ranges pick one of three sounds by their skill type; the special ranges
// always use sound 12.
int LoLEngine::olol_playAttackSound(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playAttackSound(%p) (%d)", (const void *)script, stackPos(0));
	static const uint8 sounds[] = { 12, 62, 63 };
	int d = stackPos(0);
	if ((d < 70 || d > 74) && (d < 81 || d > 89) && (d < 93 || d > 97) && (d < 102 || d > 106))
		snd_playSoundEffect(sounds[_itemProperties[d].skill & 3], -1);
	else
		snd_playSoundEffect(12, -1);
	return 1;
}
// Script opcode: toggle party membership for a character id. If the id is
// already in an enabled slot, that slot is disabled (character leaves);
// otherwise the character is added. The play field is redrawn unless updates
// are frozen.
int LoLEngine::olol_characterJoinsParty(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_characterJoinsParty(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	int16 id = stackPos(0);
	// Special character ids may be passed negated; normalize to positive.
	if (id < 0)
		id = -id;
	for (int i = 0; i < 4; i++) {
		if (!(_characters[i].flags & 1) || _characters[i].id != id)
			continue;
		// Found in the party: clear the enabled bit (the character leaves).
		_characters[i].flags &= 0xfffe;
		calcCharPortraitXpos();
		if (!_updateFlags) {
			gui_enableDefaultPlayfieldButtons();
			gui_drawPlayField();
		}
		// Never leave the selection pointing at a disabled slot.
		if (_selectedCharacter == i)
			_selectedCharacter = 0;
		return 1;
	}
	addCharacter(id);
	if (!_updateFlags) {
		gui_enableDefaultPlayfieldButtons();
		gui_drawPlayField();
	}
	return 1;
}
// Script opcode: create an item and add it to the party inventory. On
// failure the freshly created item is deleted again and 0 is returned.
int LoLEngine::olol_giveItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_giveItem(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	int item = makeItem(stackPos(0), stackPos(1), stackPos(2));
	if (addItemToInventory(item))
		return 1;
	deleteItem(item);
	return 0;
}
// Script opcode: load a TIM script ("<name>.TIM") into the given slot,
// unless that slot is already occupied.
int LoLEngine::olol_loadTimScript(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadTimScript(%p) (%d, %s)", (const void *)script, stackPos(0), stackPosString(1));
	if (_activeTim[stackPos(0)])
		return 1;
	char file[13];
	snprintf(file, sizeof(file), "%s.TIM", stackPosString(1));
	_activeTim[stackPos(0)] = _tim->load(file, &_timIngameOpcodes);
	return 1;
}
// Script opcode: execute the TIM script in the given slot; forwards result.
int LoLEngine::olol_runTimScript(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_runTimScript(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	return _tim->exec(_activeTim[stackPos(0)], stackPos(1));
}
// Script opcode: unload the TIM script in the given slot.
int LoLEngine::olol_releaseTimScript(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_releaseTimScript(%p) (%d)", (const void *)script, stackPos(0));
	_tim->unload(_activeTim[stackPos(0)]);
	return 1;
}
// Script opcode: thin wrapper around initSceneWindowDialogue().
int LoLEngine::olol_initSceneWindowDialogue(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_initSceneWindowDialogue(%p) (%d)", (const void *)script, stackPos(0));
	initSceneWindowDialogue(stackPos(0));
	return 1;
}
// Script opcode: thin wrapper around restoreAfterSceneWindowDialogue().
int LoLEngine::olol_restoreAfterSceneWindowDialogue(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreAfterSceneWindowDialogue(%p) (%d)", (const void *)script, stackPos(0));
	restoreAfterSceneWindowDialogue(stackPos(0));
	return 1;
}
// Script opcode: return the id of the item currently held on the cursor.
// Fix: the debug format string had a stray trailing parenthesis ("(%p))").
int LoLEngine::olol_getItemInHand(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getItemInHand(%p)", (const void *)script);
	return _itemInHand;
}
// Script opcode: thin wrapper around checkMagic().
// Fix: the debug format string had misplaced spacing/parentheses ("(%p )(").
int LoLEngine::olol_checkMagic(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkMagic(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	return checkMagic(stackPos(0), stackPos(1), stackPos(2));
}
// Script opcode: hand an item to a monster; a monster index of -1 is a no-op.
int LoLEngine::olol_giveItemToMonster(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_giveItemToMonster(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	if (stackPos(0) == -1)
		return 0;
	giveItemToMonster(&_monsters[stackPos(0)], stackPos(1));
	return 1;
}
// Script opcode: (re)load the per-level language file "<name>.<langext>",
// replacing any previously loaded one.
int LoLEngine::olol_loadLangFile(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadLangFile(%p) (%s)", (const void *)script, stackPosString(0));
	char filename[13];
	snprintf(filename, sizeof(filename), "%s.%s", stackPosString(0), _languageExt[_lang]);
	delete[] _levelLangFile;
	_levelLangFile = _res->fileData(filename, 0);
	return 1;
}
// Script opcode: play a sound effect by id.
int LoLEngine::olol_playSoundEffect(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playSoundEffect(%p) (%d)", (const void *)script, stackPos(0));
	snd_playSoundEffect(stackPos(0), -1);
	return 1;
}
// Script opcode: advance TIM dialogue processing; forwards the result.
int LoLEngine::olol_processDialogue(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_processDialogue(%p)", (const void *)script);
	return _tim->processDialogue();
}
// Script opcode: stop all running functions of the TIM script in the slot.
int LoLEngine::olol_stopTimScript(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_stopTimScript(%p) (%d)", (const void *)script, stackPos(0));
	_tim->stopAllFuncs(_activeTim[stackPos(0)]);
	return 1;
}
// Script opcode: return the wall flags of a block's wall (side masked to 0-3).
int LoLEngine::olol_getWallFlags(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getWallFlags(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	return _wllWallFlags[_levelBlockProperties[stackPos(0)].walls[stackPos(1) & 3]];
}
// Script opcode: modify one attribute of a monster (index -1 is a no-op).
// Selector: 0 = mode, 1 = hit points, 2 = relocate to block (keeping the
// in-block offsets), 3 = facing, 6 = OR flags in.
int LoLEngine::olol_changeMonsterStat(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_changeMonsterStat(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	if (stackPos(0) == -1)
		return 1;
	MonsterInPlay *m = &_monsters[stackPos(0) & 0x7fff];
	int16 d = stackPos(2);
	uint16 x = 0;
	uint16 y = 0;
	switch (stackPos(1)) {
	case 0:
		setMonsterMode(m, d);
		break;
	case 1:
		m->hitPoints = d;
		break;
	case 2:
		// Move only if the destination isn't blocked.
		calcCoordinates(x, y, d, m->x & 0xff, m->y & 0xff);
		if (!walkMonsterCheckDest(x, y, m, 7))
			placeMonster(m, x, y);
		break;
	case 3:
		setMonsterDirection(m, d << 1);
		break;
	case 6:
		m->flags |= d;
		break;
	default:
		break;
	}
	return 1;
}
// Script opcode: read one attribute of a monster (index -1 returns 0).
// Selector: 0 mode, 1 hit points, 2 block, 3 facing, 4 type, 5 max hit
// points (property), 6 instance flags, 7 property flags, 8 animation type.
int LoLEngine::olol_getMonsterStat(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getMonsterStat(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	if (stackPos(0) == -1)
		return 0;
	MonsterInPlay *m = &_monsters[stackPos(0) & 0x7fff];
	int d = stackPos(1);
	switch (d) {
	case 0:
		return m->mode;
	case 1:
		return m->hitPoints;
	case 2:
		return m->block;
	case 3:
		return m->facing;
	case 4:
		return m->type;
	case 5:
		return m->properties->hitPoints;
	case 6:
		return m->flags;
	case 7:
		return m->properties->flags;
	case 8:
		return _monsterAnimType[m->properties->shapeIndex];
	default:
		break;
	}
	return 0;
}
// Script opcode: free all three loaded monster shape banks.
int LoLEngine::olol_releaseMonsterShapes(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_releaseMonsterShapes(%p)", (const void *)script);
	for (int i = 0; i < 3; i++)
		releaseMonsterShapes(i);
	return 0;
}
// Script opcode: play a character chat line (string id in arg 2). In talkie
// versions any running speech and portrait animation is stopped first.
int LoLEngine::olol_playCharacterScriptChat(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playCharacterScriptChat(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	if (_flags.isTalkie) {
		snd_stopSpeech(1);
		stopPortraitSpeechAnim();
	}
	return playCharacterScriptChat(stackPos(0), stackPos(1), 1, getLangString(stackPos(2)), script, 0, 3);
}
// Script opcode: play a positional sound effect; block -1 means "at the
// party's current block".
int LoLEngine::olol_playEnvironmentalSfx(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playEnvironmentalSfx(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	uint16 block = (stackPos(1) == -1) ? _currentBlock : stackPos(1);
	snd_processEnvironmentalSoundEffect(stackPos(0), block);
	return 1;
}
// Script opcode: run one engine update tick (input, timers, screen).
int LoLEngine::olol_update(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_update(%p)", (const void *)script);
	update();
	return 1;
}
// Script opcode: heal a character. With arg3 set, use the magic heal effect;
// otherwise add hit points directly and optionally (arg2) redraw the portrait.
int LoLEngine::olol_healCharacter(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_healCharacter(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
	if (stackPos(3)) {
		processMagicHeal(stackPos(0), stackPos(1));
	} else {
		increaseCharacterHitpoints(stackPos(0), stackPos(1), true);
		if (stackPos(2))
			gui_drawCharPortraitWithStats(stackPos(0));
	}
	return 1;
}
// Script opcode: draw the "exit" button at one of two preset positions
// (x, y, offset triplets in printPara), optionally (arg1) grayed out with a
// grid box. 16-color mode uses a different box/text color set.
int LoLEngine::olol_drawExitButton(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_drawExitButton(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	static const uint8 printPara[] = { 0x90, 0x78, 0x0C, 0x9F, 0x80, 0x1E };
	int cp = _screen->setCurPage(0);
	Screen::FontId cf = _screen->setFont(Screen::FID_6_FNT);
	// X values are stored halved in the table.
	int x = printPara[3 * stackPos(0)] << 1;
	int y = printPara[3 * stackPos(0) + 1];
	int offs = printPara[3 * stackPos(0) + 2];
	char *str = getLangString(0x4033);
	int w = _screen->getTextWidth(str);
	if (_flags.use16ColorMode) {
		gui_drawBox(x - offs - w, y - 9, w + offs, 9, 0xee, 0xcc, 0x11);
		_screen->printText(str, x - (offs >> 1) - w, y - 7, 0xbb, 0);
	} else {
		gui_drawBox(x - offs - w, y - 9, w + offs, 9, 136, 251, 252);
		_screen->printText(str, x - (offs >> 1) - w, y - 7, 144, 0);
	}
	if (stackPos(1))
		_screen->drawGridBox(x - offs - w + 1, y - 8, w + offs - 2, 7, 1);
	// Restore font and page saved above.
	_screen->setFont(cf);
	_screen->setCurPage(cp);
	return 1;
}
// Script opcode: load the sound file with the given index.
int LoLEngine::olol_loadSoundFile(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_loadSoundFile(%p) (%d)", (const void *)script, stackPos(0));
	snd_loadSoundFile(stackPos(0));
	return 1;
}
// Script opcode: play a music track; forwards the result.
int LoLEngine::olol_playMusicTrack(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playMusicTrack(%p) (%d)", (const void *)script, stackPos(0));
	return snd_playTrack(stackPos(0));
}
// Script opcode: remove all monsters from the given level block.
int LoLEngine::olol_deleteMonstersFromBlock(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_deleteMonstersFromBlock(%p) (%d)", (const void *)script, stackPos(0));
	deleteMonstersFromBlock(stackPos(0));
	return 1;
}
// Script opcode: count the items on a block by walking its assigned-object
// list. Entries with bit 0x8000 set are monsters and are skipped.
int LoLEngine::olol_countBlockItems(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_countBlockItems(%p) (%d)", (const void *)script, stackPos(0));
	uint16 o = _levelBlockProperties[stackPos(0)].assignedObjects;
	int res = 0;
	while (o) {
		if (!(o & 0x8000))
			res++;
		o = findObject(o)->nextAssignedObject;
	}
	return res;
}
// Script opcode: perform a skill test using the party member with the best
// modified skill value (+25 base). Returns that character's index on a
// successful d100 roll, -1 on failure.
int LoLEngine::olol_characterSkillTest(EMCState *script){
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_characterSkillTest(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	int skill = stackPos(0);
	int n = countActiveCharacters();
	int m = 0;
	int c = 0;
	for (int i = 0; i < n ; i++) {
		int v = _characters[i].skillModifiers[skill] + _characters[i].skillLevels[skill] + 25;
		if (v > m) {
			m = v;
			c = i;
		}
	}
	return (rollDice(1, 100) > m) ? -1 : c;
}
// Script opcode: count monsters that are alive (hit points left) and not in
// mode 13 (slots in mode 13 are treated as free elsewhere).
int LoLEngine::olol_countAllMonsters(EMCState *script){
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_countAllMonsters(%p)", (const void *)script);
	int numAlive = 0;
	for (int i = 0; i < 30; i++) {
		const MonsterInPlay &m = _monsters[i];
		if (m.hitPoints > 0 && m.mode != 13)
			numAlive++;
	}
	return numAlive;
}
// Script opcode: play the outro matching the lead character's id (-9/-5/-1
// select variants 1/3/2), after letting any running speech finish, then quit.
int LoLEngine::olol_playEndSequence(EMCState *script){
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playEndSequence(%p)", (const void *)script);
	int c = 0;
	if (_characters[0].id == -9)
		c = 1;
	else if (_characters[0].id == -5)
		c = 3;
	else if (_characters[0].id == -1)
		c = 2;
	// Wait for in-flight character speech to complete.
	while (snd_updateCharacterSpeech())
		delay(_tickLength);
	_eventList.clear();
	_screen->hideMouse();
	_screen->getPalette(1).clear();
	// Difficulty 2 unlocks the extended ending variant.
	showOutro(c, (_monsterDifficulty == 2));
	quitGame();
	return 0;
}
// Script opcode: stop the portrait talk animation (and, in talkie versions,
// any playing speech sample).
int LoLEngine::olol_stopPortraitSpeechAnim(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_stopPortraitSpeechAnim(%p)", (const void *)script);
	if (_flags.isTalkie)
		snd_stopSpeech(1);
	stopPortraitSpeechAnim();
	return 1;
}
// Script opcode: set the brightness level, optionally (arg1 == 1) applying
// it to palette 0 immediately. Returns the previous brightness.
int LoLEngine::olol_setPaletteBrightness(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setPaletteBrightness(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	uint16 old = _brightness;
	_brightness = stackPos(0);
	if (stackPos(1) == 1)
		setPaletteBrightness(_screen->getPalette(0), stackPos(0), _lampEffect);
	return old;
}
int LoLEngine::olol_calcInflictableDamage(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_calcInflictableDamage(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
return calcInflictableDamage(stackPos(0), stackPos(1), stackPos(2));<|fim▁hole|> debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getInflictedDamage(%p) (%d)", (const void *)script, stackPos(0));
int mx = stackPos(0);
return rollDice(2, mx);
}
// Script opcode: return 1 if a character with the given id is in the party
// and has flag bit 1 or 8 set, otherwise 0.
int LoLEngine::olol_checkForCertainPartyMember(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkForCertainPartyMember(%p) (%d)", (const void *)script, stackPos(0));
	const int16 wantedId = stackPos(0);
	for (int slot = 0; slot < 4; slot++) {
		if ((_characters[slot].flags & 9) && _characters[slot].id == wantedId)
			return 1;
	}
	return 0;
}
// Script opcode: print a message (string id in arg 1) with formatting args,
// then play an optional sound effect (arg 2, skipped when negative).
int LoLEngine::olol_printMessage(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_printMessage(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9));
	int snd = stackPos(2);
	_txt->printMessage(stackPos(0), getLangString(stackPos(1)), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8), stackPos(9));
	if (snd >= 0)
		snd_playSoundEffect(snd, -1);
	return 1;
}
// Script opcode: delete an item, unlinking it from its level block first if
// it is placed on one.
int LoLEngine::olol_deleteLevelItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_deleteLevelItem(%p) (%d)", (const void *)script, stackPos(0));
	if (_itemsInPlay[stackPos(0)].block)
		removeLevelItem(stackPos(0), _itemsInPlay[stackPos(0)].block);
	deleteItem(stackPos(0));
	return 1;
}
// Script opcode: thin wrapper around calcInflictableDamagePerItem().
int LoLEngine::olol_calcInflictableDamagePerItem(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_calcInflictableDamagePerItem(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
	return calcInflictableDamagePerItem(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
}
// Script opcode: launch a distance attack (thrown object / projectile).
// Unless bit 0x8000 of arg 8 is set, the launch point is forced to the block
// center (0x80/0x80). On launch failure the projectile item is deleted.
int LoLEngine::olol_distanceAttack(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_distanceAttack(%p) (%d, %d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7), stackPos(8));
	uint16 fX = stackPos(3);
	uint16 fY = stackPos(4);
	if (!(stackPos(8) & 0x8000))
		fX = fY = 0x80;
	uint16 x = 0;
	uint16 y = 0;
	calcCoordinates(x, y, stackPos(2), fX, fY);
	if (launchObject(stackPos(0), stackPos(1), x, y, stackPos(5), stackPos(6) << 1, stackPos(7), stackPos(8), 0x3f))
		return 1;
	deleteItem(stackPos(1));
	return 0;
}
// Script opcode: remove active effects from a character.
int LoLEngine::olol_removeCharacterEffects(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_removeCharacterEffects(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	removeCharacterEffects(&_characters[stackPos(0)], stackPos(1), stackPos(2));
	return 1;
}
// Script opcode: returns 1 only when ALL 48 inventory slots are empty, else
// 0. NOTE(review): despite the name this does not test for a "full"
// inventory - presumably the script side interprets the result accordingly;
// verify against the callers before renaming or changing.
int LoLEngine::olol_checkInventoryFull(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkInventoryFull(%p)", (const void *)script);
	for (int i = 0; i < 48; i++) {
		if (_inventory[i])
			return 0;
	}
	return 1;
}
// Script opcode: move objects off a block to another level/block. Monsters
// (bit 0x8000) are suspended (mode 14) if enabled by arg 3; items with the
// 0x4000 shape flag are relocated if enabled by arg 5, optionally running
// the destination block's level script. Returns 1 if anything was moved.
int LoLEngine::olol_objectLeavesLevel(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_objectLeavesLevel(%p) (%d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));
	int o = _levelBlockProperties[stackPos(0)].assignedObjects;
	int res = 0;
	int level = stackPos(2);
	int block = stackPos(1);
	int runScript = stackPos(4);
	int includeMonsters = stackPos(3);
	int includeItems = stackPos(5);
	// WORKAROUND for script bug
	// Items would vanish when thrown towards the stairs
	// in white tower level 3.
	if (_currentLevel == 21 && level == 21 && block == 0x3e0) {
		level = 20;
		block = 0x0247;
	}
	while (o) {
		// Fetch the next link before this object is unassigned below.
		int l = o;
		o = findObject(o)->nextAssignedObject;
		if (l & 0x8000) {
			if (!includeMonsters)
				continue;
			l &= 0x7fff;
			MonsterInPlay *m = &_monsters[l];
			setMonsterMode(m, 14);
			checkSceneUpdateNeed(m->block);
			placeMonster(m, 0, 0);
			res = 1;
		} else {
			if (!(_itemsInPlay[l].shpCurFrame_flg & 0x4000) || !includeItems)
				continue;
			placeMoveLevelItem(l, level, block, _itemsInPlay[l].x & 0xff, _itemsInPlay[l].y & 0xff, _itemsInPlay[l].flyingHeight);
			if (!runScript || level != _currentLevel) {
				res = 1;
				continue;
			}
			runLevelScriptCustom(block, 0x80, -1, l, 0, 0);
			res = 1;
		}
	}
	return res;
}
// Script opcode: add a spell to a character's scroll.
// Fix: the debugC call only logged stackPos(0) although stackPos(1) is
// consumed as well - both arguments are now traced.
int LoLEngine::olol_addSpellToScroll(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_addSpellToScroll(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
	addSpellToScroll(stackPos(0), stackPos(1));
	return 1;
}
// Script opcode: print a dialogue line (string id) into text dim 3.
int LoLEngine::olol_playDialogueText(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playDialogueText(%p) (%d)", (const void *)script, stackPos(0));
	_txt->printDialogueText(3, getLangString(stackPos(0)), script, 0, 1);
	return 1;
}
// Script opcode: play a speech track and/or print its text (dim 4). The text
// is shown when speech is unavailable or subtitles are enabled.
int LoLEngine::olol_playDialogueTalkText(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_playDialogueTalkText(%p) (%d)", (const void *)script, stackPos(0));
	int track = stackPos(0);
	if (!snd_playCharacterSpeech(track, 0, 0) || textEnabled()) {
		char *s = getLangString(track);
		_txt->printDialogueText(4, s, script, 0, 1);
	}
	return 1;
}
// EMC opcode: scans the 30 monster slots for type stackPos(0) (-1 matches any)
// and reports the hostility of the FIRST match: 0 if its mode is 1, else 1.
// Returns 1 when no monster of the requested type exists.
int LoLEngine::olol_checkMonsterTypeHostility(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkMonsterTypeHostility(%p) (%d)", (const void *)script, stackPos(0));
for (int i = 0; i < 30; i++) {
if (stackPos(0) != _monsters[i].type && stackPos(0) != -1)
continue;
return (_monsters[i].mode == 1) ? 0 : 1;
}
return 1;
}
// EMC opcode: stores the script function index to be invoked next.
int LoLEngine::olol_setNextFunc(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setNextFunc(%p) (%d)", (const void *)script, stackPos(0));
_nextScriptFunc = stackPos(0);
return 1;
}
// EMC opcode: intentional no-op; keeps its slot in the opcode table occupied.
int LoLEngine::olol_dummy1(EMCState *script) {
return 1;
}
// EMC opcode: puts monster stackPos(0) into mode 14 (suspended — same mode used
// by olol_objectLeavesLevel), flags its block for redraw and detaches it from
// the map by placing it at (0, 0).
int LoLEngine::olol_suspendMonster(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_suspendMonster(%p) (%d)", (const void *)script, stackPos(0));
MonsterInPlay *m = &_monsters[stackPos(0) & 0x7fff];
setMonsterMode(m, 14);
checkSceneUpdateNeed(m->block);
placeMonster(m, 0, 0);
return 1;
}
// EMC opcode: sets the text displayer's script parameter to stackPos(0).
int LoLEngine::olol_setScriptTextParameter(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setScriptTextParameter(%p) (%d)", (const void *)script, stackPos(0));
_txt->_scriptTextParameter = stackPos(0);
return 1;
}
// EMC opcode: polls input once; on a click (input code that is neither 0 nor
// has bit 0x800 set) triggers GUI event stackPos(0). Returns 0 while waiting,
// 1 once the event fired (or the input was consumed when no event was given).
int LoLEngine::olol_triggerEventOnMouseButtonClick(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_triggerEventOnMouseButtonClick(%p) (%d)", (const void *)script, stackPos(0));
gui_notifyButtonListChanged();
snd_updateCharacterSpeech();
int f = checkInput(0);
removeInputTop();
if (f == 0 || (f & 0x800))
return 0;
int evt = stackPos(0);
if (evt) {
gui_triggerEvent(evt);
_seqTrigger = 1;
} else {
// No event requested: just swallow the click.
removeInputTop();
}
return 1;
}
// EMC opcode: prints lang string stackPos(2) into screen dimension stackPos(0).
// Flag bit 0 of stackPos(1) clears the dimension first; bits 0/1 reset the
// text cursor positions (note: "flg & 3" also fires when only bit 1 is set).
int LoLEngine::olol_printWindowText(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_printWindowText(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
int dim = stackPos(0);
int flg = stackPos(1);
_screen->setScreenDim(dim);
if (flg & 1)
_txt->clearCurDim();
if (flg & 3)
_txt->resetDimTextPositions(dim);
_txt->printDialogueText(dim, getLangString(stackPos(2)), script, 0, 3);
return 1;
}
// EMC opcode: counts live monsters (mode < 14, i.e. not suspended/left) whose
// type appears in the -1-terminated list of type ids passed on the stack.
// The types are collected into a bitmask, so type ids must be < 16.
int LoLEngine::olol_countSpecificMonsters(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_countSpecificMonsters(%p) (%d, ...)", (const void *)script, stackPos(0));
uint16 types = 0;
int res = 0;
int cnt = 0;
while (stackPos(cnt) != -1)
types |= (1 << stackPos(cnt++));
for (int i = 0; i < 30; i++) {
if (((1 << _monsters[i].type) & types) && _monsters[i].mode < 14)
res++;
}
return res;
}
// EMC opcode: advances a wall animation. Picks frame (stackPos(2) modulo the
// frame count stackPos(3)) from the per-frame wall-type list starting at
// stackPos(4) and applies it to wall stackPos(1) of block stackPos(0).
int LoLEngine::olol_updateBlockAnimations2(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_updateBlockAnimations2(%p) (%d, %d, %d, %d, ...)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
	int numFrames = stackPos(3);
	// Bad script data with numFrames <= 0 would make the modulo below
	// undefined behavior (division by zero); the upper bound of 97 matches
	// the maximum number of stack parameters a script call can carry.
	assert(numFrames > 0 && numFrames <= 97);
	int curFrame = stackPos(2) % numFrames;
	setWallType(stackPos(0), stackPos(1), stackPos(4 + curFrame));
	return 0;
}
// EMC opcode: checks whether the party owns an item of property type
// stackPos(1). Unless stackPos(2) is set, the 48 shared inventory slots and
// the hand item are searched first; then the equipment of character
// stackPos(0) (or of all four characters when -1) is checked.
// Returns 1 when found, 0 otherwise.
int LoLEngine::olol_checkPartyForItemType(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkPartyForItemType(%p) (%d, %d, %d))", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
int p = stackPos(1);
if (!stackPos(2)) {
for (int i = 0; i < 48; i++) {
if (!_inventory[i] || _itemsInPlay[_inventory[i]].itemPropertyIndex != p)
continue;
return 1;
}
if (_itemsInPlay[_itemInHand].itemPropertyIndex == p)
return 1;
}
int last = (stackPos(0) == -1) ? 3 : stackPos(0);
int first = (stackPos(0) == -1) ? 0 : stackPos(0);
for (int i = first; i <= last; i++) {
if (itemEquipped(i, p))
return 1;
}
return 0;
}
// EMC opcode: sets the door-blocking flag and echoes the new value back.
int LoLEngine::olol_blockDoor(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_blockDoor(%p) (%d)", (const void *)script, stackPos(0));
_blockDoor = stackPos(0);
return _blockDoor;
}
// EMC opcode: resets the dialogue state of the active TIM script stackPos(0).
int LoLEngine::olol_resetTimDialogueState(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_resetTimDialogueState(%p) (%d)", (const void *)script, stackPos(0));
_tim->resetDialogueState(_activeTim[stackPos(0)]);
return 1;
}
// EMC opcode: searches the object chain of a block for an item at sub-block
// position (stackPos(1), stackPos(2)); -1 for either coordinate matches any.
// stackPos(3) == 0 resumes the search from _emcLastItem (previous result), so
// repeated calls can enumerate multiple items on the same block.
// Returns the item id or 0 when nothing (further) matches.
int LoLEngine::olol_getItemOnPos(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getItemOnPos(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
int pX = stackPos(1);
if (pX != -1)
pX &= 0xff;
int pY = stackPos(2);
if (pY != -1)
pY &= 0xff;
int o = (stackPos(3) || _emcLastItem == -1) ? stackPos(0) : _emcLastItem;
_emcLastItem = _levelBlockProperties[o].assignedObjects;
// Walk the chain; ids with bit 0x8000 are monsters and are skipped.
// NOTE(review): the skip path re-reads _levelBlockProperties[id & 0x7fff]
// rather than following nextAssignedObject — presumably intentional chain
// layout, but worth confirming against the object-list format.
while (_emcLastItem) {
if (_emcLastItem & 0x8000) {
o = _emcLastItem & 0x7fff;
_emcLastItem = _levelBlockProperties[o].assignedObjects;
continue;
}
if (pX != -1 && (_itemsInPlay[_emcLastItem].x & 0xff) != pX) {
o = _emcLastItem & 0x7fff;
_emcLastItem = _levelBlockProperties[o].assignedObjects;
continue;
}
if (pY != -1 && (_itemsInPlay[_emcLastItem].y & 0xff) != pY) {
o = _emcLastItem & 0x7fff;
_emcLastItem = _levelBlockProperties[o].assignedObjects;
continue;
}
return _emcLastItem;
}
return 0;
}
// EMC opcode: removes item stackPos(0) from block stackPos(1).
int LoLEngine::olol_removeLevelItem(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_removeLevelItem(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
removeLevelItem(stackPos(0), stackPos(1));
return 1;
}
// EMC opcode: intentionally a no-op (see comment below).
int LoLEngine::olol_savePage5(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_savePage5(%p)", (const void *)script);
// Not implemented: The original code uses this to back up and restore page 5 which is used
// to load WSA files. Since our WSA player provides its own memory buffers we don't
// need to use page 5.
return 1;
}
// EMC opcode: page-5 restore is obsolete here (see comment below); only the
// six TIM animation slots are freed.
int LoLEngine::olol_restorePage5(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restorePage5(%p)", (const void *)script);
// Not implemented: The original code uses this to back up and restore page 5 which is used
// to load WSA files. Since our WSA player provides its own memory buffers we don't
// need to use page 5.
for (int i = 0; i < 6; i++)
_tim->freeAnimStruct(i);
return 1;
}
// EMC opcode: starts dialogue sequence stackPos(0) with parameter stackPos(1).
int LoLEngine::olol_initDialogueSequence(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_initDialogueSequence(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
initDialogueSequence(stackPos(0), stackPos(1));
return 1;
}
// EMC opcode: restores the screen/UI state after a dialogue sequence.
int LoLEngine::olol_restoreAfterDialogueSequence(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreAfterDialogueSequence(%p) (%d)", (const void *)script, stackPos(0));
restoreAfterDialogueSequence(stackPos(0));
return 1;
}
// EMC opcode: installs the button layout for a special (scripted) scene.
int LoLEngine::olol_setSpecialSceneButtons(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setSpecialSceneButtons(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
setSpecialSceneButtons(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
return 1;
}
// EMC opcode: restores the regular button layout after a special scene.
int LoLEngine::olol_restoreButtonsAfterSpecialScene(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreButtonsAfterSpecialScene(%p)", (const void *)script);
gui_specialSceneRestoreButtons();
return 1;
}
// EMC opcode: sets up a special scene; all six stack values are forwarded.
int LoLEngine::olol_prepareSpecialScene(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_prepareSpecialScene(%p) (%d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));
prepareSpecialScene(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5));
return 1;
}
// EMC opcode: tears down a special scene; the helper's result is returned to
// the script.
int LoLEngine::olol_restoreAfterSpecialScene(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreAfterSpecialScene(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
return restoreAfterSpecialScene(stackPos(0), stackPos(1), stackPos(2), stackPos(3));
}
// EMC opcode: overwrites the sound-list entry mapped to sfx index stackPos(1)
// with the file name string stackPos(0). Indices above 250 and unmapped
// entries (0xffff) are ignored. Always returns 0.
// NOTE(review): strcpy assumes the _ingameSoundList slot is large enough for
// the script-supplied name — confirm the slot size against the data files.
int LoLEngine::olol_assignCustomSfx(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_assignCustomSfx(%p) (%s, %d)", (const void *)script, stackPosString(0), stackPos(1));
const char *c = stackPosString(0);
int i = stackPos(1);
if (!c || i > 250)
return 0;
uint16 t = READ_LE_UINT16(&_ingameSoundIndex[i << 1]);
if (t == 0xffff)
return 0;
strcpy(_ingameSoundList[t], c);
return 0;
}
// EMC opcode: returns the id of the next monster in a block's object chain.
// stackPos(1) == -1 starts at the head of block stackPos(0); otherwise the
// search continues after object stackPos(1). Returns -1 when no monster
// (entry with bit 0x8000) follows.
int LoLEngine::olol_findAssignedMonster(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_findAssignedMonster(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
uint16 o = stackPos(1) == -1 ? _levelBlockProperties[stackPos(0)].assignedObjects : findObject(stackPos(1))->nextAssignedObject;
while (o) {
if (o & 0x8000)
return o & 0x7fff;
o = findObject(o)->nextAssignedObject;
}
return -1;
}
// EMC opcode: checks block stackPos(0) for monster stackPos(1) (-1 on the
// script side becomes 0xffff here and matches any monster). Only the leading
// monster entries of the chain are examined — the loop stops at the first
// non-monster object. Returns the monster id or -1.
int LoLEngine::olol_checkBlockForMonster(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkBlockForMonster(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
uint16 block = stackPos(0);
uint16 id = stackPos(1) | 0x8000;
uint16 o = _levelBlockProperties[block].assignedObjects;
while (o & 0x8000) {
if (id == 0xffff || id == o)
return o & 0x7fff;
o = findObject(o)->nextAssignedObject;
}
return -1;
}
// EMC opcode: forwards all eight stack values to the region transform helper.
int LoLEngine::olol_transformRegion(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_transformRegion(%p) (%d, %d, %d, %d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7));
transformRegion(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4), stackPos(5), stackPos(6), stackPos(7));
return 1;
}
// EMC opcode: applies a direction offset (stackPos(2)) to the coordinates and
// returns the updated x when stackPos(3) is non-zero, else the updated y.
// (The helper presumably takes x/y by reference — it must, for the return
// values to be meaningful.)
int LoLEngine::olol_calcCoordinatesAddDirectionOffset(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_calcCoordinatesAddDirectionOffset(%p) (%d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3));
uint16 x = stackPos(0);
uint16 y = stackPos(1);
calcCoordinatesAddDirectionOffset(x, y, stackPos(2));
return stackPos(3) ? x : y;
}
// EMC opcode: delegates to resetPortraitsAndDisableSysTimer().
int LoLEngine::olol_resetPortraitsAndDisableSysTimer(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_resetPortraitsAndDisableSysTimer(%p)", (const void *)script);
resetPortraitsAndDisableSysTimer();
return 1;
}
// EMC opcode: clears the pending scene-restore flag and re-enables system
// timer 2.
int LoLEngine::olol_enableSysTimer(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_enableSysTimer(%p)", (const void *)script);
_needSceneRestore = 0;
enableSysTimer(2);
return 1;
}
// EMC opcode: exposes the scene-restore flag to the script.
int LoLEngine::olol_checkNeedSceneRestore(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_checkNeedSceneRestore(%p)", (const void *)script);
return _needSceneRestore;
}
// EMC opcode: iterates over party members with flag bit 0 set (active).
// stackPos(0) non-zero restarts the cycle at slot 0; zero advances it.
// Returns the next active character index, or -1 when the cycle is exhausted.
int LoLEngine::olol_getNextActiveCharacter(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getNextActiveCharacter(%p) (%d)", (const void *)script, stackPos(0));
if (stackPos(0))
_scriptCharacterCycle = 0;
else
_scriptCharacterCycle++;
while (_scriptCharacterCycle < 4) {
if (_characters[_scriptCharacterCycle].flags & 1)
return _scriptCharacterCycle;
_scriptCharacterCycle++;
}
return -1;
}
// EMC opcode: forwards the five stack values to the paralyze/poison helper
// and returns its result.
int LoLEngine::olol_paralyzePoisonCharacter(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_paralyzePoisonCharacter(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
return paralyzePoisonCharacter(stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
}
// EMC opcode: redraws one character portrait with stats, or all portraits
// when the script passes -1.
int LoLEngine::olol_drawCharPortrait(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_drawCharPortrait(%p) (%d)", (const void *)script, stackPos(0));
	const int charNum = stackPos(0);
	if (charNum != -1)
		gui_drawCharPortraitWithStats(charNum);
	else
		gui_drawAllCharPortraitsWithStats();
	return 1;
}
// EMC opcode: removes the first item of property type stackPos(0) from the
// party. The shared 48-slot inventory is searched first (with a GUI redraw on
// success); then each active character's 11 equipment slots. Returns 1 when
// something was removed, 0 otherwise.
int LoLEngine::olol_removeInventoryItem(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_removeInventoryItem(%p) (%d)", (const void *)script, stackPos(0));
int itemType = stackPos(0);
for (int i = 0; i < 48; i++) {
if (!_inventory[i] || _itemsInPlay[_inventory[i]].itemPropertyIndex != itemType)
continue;
_inventory[i] = 0;
gui_drawInventory();
return 1;
}
for (int i = 0; i < 4; i++) {
if (!(_characters[i].flags & 1))
continue;
for (int ii = 0; ii < 11; ii++) {
if (!_characters[i].items[ii] || _itemsInPlay[_characters[i].items[ii]].itemPropertyIndex != itemType)
continue;
_characters[i].items[ii] = 0;
return 1;
}
}
return 0;
}
// EMC opcode: returns the animator's last-part state for animation stackPos(0).
int LoLEngine::olol_getAnimationLastPart(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getAnimationLastPart(%p) (%d)", (const void *)script, stackPos(0));
return _animator->resetLastPart(stackPos(0));
}
// EMC opcode: selects a level wall shape (looked up through the wall-shape
// map) as the special GUI shape at (stackPos(2), stackPos(3)) with mirror
// flag stackPos(4); stackPos(0) == 0 clears the special shape instead.
int LoLEngine::olol_assignSpecialGuiShape(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_assignSpecialGuiShape(%p) (%d, %d, %d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2), stackPos(3), stackPos(4));
if (stackPos(0)) {
_specialGuiShape = _levelShapes[_levelShapeProperties[_wllShapeMap[stackPos(0)]].shapeIndex[stackPos(1)]];
_specialGuiShapeX = stackPos(2);
_specialGuiShapeY = stackPos(3);
_specialGuiShapeMirrorFlag = stackPos(4);
} else {
_specialGuiShape = 0;
_specialGuiShapeX = _specialGuiShapeY = _specialGuiShapeMirrorFlag = 0;
}
return 1;
}
// EMC opcode: looks for an item of property type stackPos(2). When
// stackPos(0) == 0 the shared inventory is checked first and a hit there
// returns 0. Then the equipment of character stackPos(1) (or of all
// characters when -1) is scanned; returns the owning character index, or -1.
int LoLEngine::olol_findInventoryItem(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_findInventoryItem(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
if (stackPos(0) == 0) {
for (int i = 0; i < 48; i++) {
if (!_inventory[i])
continue;
if (_itemsInPlay[_inventory[i]].itemPropertyIndex == stackPos(2))
return 0;
}
}
int cur = stackPos(1);
int last = cur;
if (stackPos(1) == -1) {
cur = 0;
last = 4;
}
// Note: "cur < last" means a specific character index (cur == last) only
// gets scanned via the -1 branch; a concrete stackPos(1) makes this loop
// run zero times.
for (;cur < last; cur++) {
if (!(_characters[cur].flags & 1))
continue;
for (int i = 0; i < 11; i++) {
if (!_characters[cur].items[i])
continue;
if (_itemsInPlay[_characters[cur].items[i]].itemPropertyIndex == stackPos(2))
return cur;
}
}
return -1;
}
// EMC opcode: copies the backup palette 1 into palette 0 (16 colors in
// 16-color mode, otherwise the first 128 entries), fades to it and clears the
// fade flag.
int LoLEngine::olol_restoreFadePalette(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreFadePalette(%p)", (const void *)script);
_screen->getPalette(0).copy(_screen->getPalette(1), 0, _flags.use16ColorMode ? 16 : 128);
_screen->fadePalette(_screen->getPalette(0), 10);
_screen->_fadeFlag = 0;
return 1;
}
// EMC opcode: exposes the currently selected character index to the script.
int LoLEngine::olol_getSelectedCharacter(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getSelectedCharacter(%p)", (const void *)script);
return _selectedCharacter;
}
// EMC opcode: puts item stackPos(0) into the player's hand.
int LoLEngine::olol_setHandItem(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setHandItem(%p) (%d)", (const void *)script, stackPos(0));
setHandItem(stackPos(0));
return 1;
}
// EMC opcode: drinks a bezel cup; the first argument is inverted (3 - n)
// before being passed on — presumably mapping a script-side cup index to the
// engine's ordering.
int LoLEngine::olol_drinkBezelCup(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_drinkBezelCup(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
drinkBezelCup(3 - stackPos(0), stackPos(1));
return 1;
}
// EMC opcode: patches one field of item-in-play stackPos(0) with the low 13
// bits of stackPos(2). Field selector 4 sets the item property index,
// selector 15 replaces the lower 13 bits of the frame/flag word (keeping the
// top three flag bits). Returns the stored value, -1 for unknown selectors,
// 0 for invalid item ids.
int LoLEngine::olol_changeItemTypeOrFlag(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_changeItemTypeOrFlag(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	if (stackPos(0) < 1)
		return 0;
	ItemInPlay *itm = &_itemsInPlay[stackPos(0)];
	const int val = stackPos(2) & 0x1fff;
	switch (stackPos(1)) {
	case 4:
		itm->itemPropertyIndex = val;
		return val;
	case 15:
		itm->shpCurFrame_flg = (itm->shpCurFrame_flg & 0xe000) | val;
		return val;
	default:
		return -1;
	}
}
// EMC opcode: swaps the first inventory item of property type stackPos(0)
// with the item currently in hand. Returns the previous hand item, or -1 when
// no matching inventory item exists. stackPos(1) forces an inventory redraw.
int LoLEngine::olol_placeInventoryItemInHand(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_placeInventoryItemInHand(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
int itemType = stackPos(0);
int i = 0;
for (; i < 48; i++) {
if (!_inventory[i])
continue;
if (_itemsInPlay[_inventory[i]].itemPropertyIndex == itemType)
break;
}
if (i == 48)
return -1;
_inventoryCurItem = i;
int r = _itemInHand;
setHandItem(_inventory[i]);
// The old hand item takes the vacated inventory slot.
_inventory[i] = r;
if (stackPos(1))
gui_drawInventory();
return r;
}
// EMC opcode: casts spell stackPos(1) for character stackPos(0) with
// parameter stackPos(2); the helper's result is returned to the script.
int LoLEngine::olol_castSpell(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_castSpell(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
return castSpell(stackPos(0), stackPos(1), stackPos(2));
}
// EMC opcode: plays the pit-drop sequence. Mode stackPos(0) != 0 is the
// regular fall (scene redraw, scroll, sound, screen shake); mode 0 picks a
// scream sound based on which special character id (-1/-5/-8/-9) is in the
// party, blanks the scene window and scrolls further.
int LoLEngine::olol_pitDrop(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_pitDrop(%p) (%d)", (const void *)script, stackPos(0));
int m = stackPos(0);
_screen->updateScreen();
if (m) {
gui_drawScene(2);
pitDropScroll(9);
snd_playSoundEffect(-1, -1);
shakeScene(30, 4, 0, 1);
} else {
// Pick the scream matching the active special character (negative ids).
int t = -1;
for (int i = 0; i < 4; i++) {
if (!(_characters[i].flags & 1) || (_characters[i].id >= 0))
continue;
if (_characters[i].id == -1)
t = 54;
else if (_characters[i].id == -5)
t = 53;
else if (_characters[i].id == -8)
t = 52;
else if (_characters[i].id == -9)
t = 51;
}
_screen->fillRect(112, 0, 288, 120, 0, 2);
snd_playSoundEffect(t, -1);
pitDropScroll(12);
}
return 1;
}
// EMC opcode: raises skill stackPos(1) of character stackPos(0) by exactly
// one level, by granting the experience still missing to the next level
// requirement. Returns the number of levels actually gained.
int LoLEngine::olol_increaseSkill(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_increaseSkill(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
LoLCharacter *c = &_characters[stackPos(0)];
int s = stackPos(1);
int l = c->skillLevels[s];
increaseExperience(stackPos(0), s, _expRequirements[l] - c->experiencePts[s]);
return c->skillLevels[s] - l;
}
// EMC opcode: flashes the screen palette. In 16-color mode a copy of palette 1
// with all red components maxed (0x3f) is shown briefly; in 256-color mode a
// flash palette generated from stackPos(0) is used, with an extra overlay pass
// over the scene window when smooth scrolling is active.
int LoLEngine::olol_paletteFlash(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_paletteFlash(%p) (%d)", (const void *)script, stackPos(0));
Palette &p1 = _screen->getPalette(1);
if (_flags.use16ColorMode) {
Palette p2(16);
p2.copy(p1);
uint8 *d = p2.getData();
// Max out the red component of every entry (VGA 6-bit range).
for (int i = 0; i < 16; i++)
d[i * 3] = 0x3f;
_screen->setScreenPalette(p2);
_screen->updateScreen();
delay(4 * _tickLength);
_screen->setScreenPalette(p1);
if (_smoothScrollModeNormal)
_screen->copyRegion(112, 0, 112, 0, 176, 120, 2, 0);
_screen->updateScreen();
} else {
Palette &p2 = _screen->getPalette(3);
uint8 ovl[256];
generateFlashPalette(p1, p2, stackPos(0));
_screen->loadSpecialColors(p1);
_screen->loadSpecialColors(p2);
if (_smoothScrollModeNormal) {
// Identity overlay except color 1 -> 6 for the scene window.
for (int i = 0; i < 256; i++)
ovl[i] = i;
ovl[1] = 6;
_screen->copyRegion(112, 0, 112, 0, 176, 120, 0, 2);
_screen->applyOverlay(112, 0, 176, 120, 0, ovl);
}
_screen->setScreenPalette(p2);
_screen->updateScreen();
delay(2 * _tickLength);
_screen->setScreenPalette(p1);
if (_smoothScrollModeNormal)
_screen->copyRegion(112, 0, 112, 0, 176, 120, 2, 0);
_screen->updateScreen();
}
return 0;
}
// EMC opcode: plays the DARKLITE.WSA cutscene that lifts the magic shroud.
// Builds a 28-entry palette fade table from LITEPAL1..3.COL (generateFadeTable
// presumably fills the range and returns the next free slot pointer — confirm
// against Screen::generateFadeTable), then steps through the animation with
// timed palette changes and sound effects.
int LoLEngine::olol_restoreMagicShroud(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_restoreMagicShroud(%p)", (const void *)script);
WSAMovie_v2 *mov = new WSAMovie_v2(this);
mov->open("DARKLITE.WSA", 2, 0);
if (!mov->opened()) {
delete mov;
warning("LoLEngine::olol_restoreMagicShroud: Could not open file: \"DARKLITE.WSA\"");
return 1;
}
_screen->hideMouse();
Palette *fadeTab[28];
for (int i = 0; i < 28; i++)
fadeTab[i] = new Palette(_flags.use16ColorMode ? 16 : 256);
Palette **tpal1 = &fadeTab[0];
Palette **tpal2 = &fadeTab[1];
Palette **tpal3 = &fadeTab[2];
Palette **tpal4 = 0;
// 16-color palettes are 48 bytes (16 * RGB), 256-color ones 768 bytes.
int len = _flags.use16ColorMode ? 48 : 768;
_res->loadFileToBuf("LITEPAL1.COL", (*tpal1)->getData(), len);
tpal2 = _screen->generateFadeTable(tpal3, 0, *tpal1, 21);
_res->loadFileToBuf("LITEPAL2.COL", (*tpal2)->getData(), len);
tpal4 = tpal2++;
_res->loadFileToBuf("LITEPAL3.COL", (*tpal1)->getData(), len);
_screen->generateFadeTable(tpal2, *tpal4, *tpal1, 4);
// Phase 1: frames 0..20, one fade-table palette per frame, sparks on
// selected frames.
for (int i = 0; i < 21; i++) {
uint32 etime = _system->getMillis() + 20 * _tickLength;
mov->displayFrame(i, 0, 0, 0, 0, 0, 0);
_screen->setScreenPalette(**tpal3++);
_screen->updateScreen();
if (i == 2 || i == 5 || i == 8 || i == 11 || i == 13 || i == 15 || i == 17 || i == 19)
snd_playSoundEffect(95, -1);
delayUntil(etime);
}
snd_playSoundEffect(91, -1);
_screen->fadePalette(**tpal3++, 300);
// Phase 2: frames 22..37 with palette changes on a few key frames.
for (int i = 22; i < 38; i++) {
uint32 etime = _system->getMillis() + 12 * _tickLength;
mov->displayFrame(i, 0, 0, 0, 0, 0, 0);
if (i == 22 || i == 24 || i == 28 || i == 32) {
snd_playSoundEffect(131, -1);
_screen->setScreenPalette(**tpal3++);
}
_screen->updateScreen();
delayUntil(etime);
}
mov->close();
delete mov;
for (int i = 0; i < 28; i++)
delete fadeTab[i];
_screen->showMouse();
return 1;
}
// EMC opcode: disables GUI controls in mode stackPos(0); returns the helper's
// result.
int LoLEngine::olol_disableControls(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_disableControls(%p) (%d)", (const void *)script, stackPos(0));
return gui_disableControls(stackPos(0));
}
// EMC opcode: re-enables GUI controls; returns the helper's result.
int LoLEngine::olol_enableControls(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_enableControls(%p)", (const void *)script);
return gui_enableControls();
}
// EMC opcode: shakes the scene with duration/amplitude from the stack.
int LoLEngine::olol_shakeScene(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_shakeScene(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
shakeScene(stackPos(0), stackPos(1), stackPos(2), 1);
return 1;
}
// EMC opcode: triggers a gas explosion with parameter stackPos(0).
int LoLEngine::olol_gasExplosion(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_gasExplosion(%p) (%d)", (const void *)script, stackPos(0));
processGasExplosion(stackPos(0));
return 1;
}
// EMC opcode: returns the block reached from block stackPos(0) when moving in
// direction stackPos(1).
int LoLEngine::olol_calcNewBlockPosition(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_calcNewBlockPosition(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
return calcNewBlockPosition(stackPos(0), stackPos(1));
}
// EMC opcode: redraws the scene to page 2, cross-transforms the scene window
// region (112,0 / 176x120) onto page 0 and refreshes the draw page.
int LoLEngine::olol_fadeScene(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_fadeScene(%p)", (const void *)script);
gui_drawScene(2);
transformRegion(112, 0, 112, 0, 176, 120, 2, 0);
updateDrawPage2();
return 1;
}
// EMC opcode: delegates to updateDrawPage2().
int LoLEngine::olol_updateDrawPage2(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_updateDrawPage2(%p)", (const void *)script);
updateDrawPage2();
return 1;
}
// EMC opcode: mode 1 switches the mouse cursor to the special icon 133; any
// other mode restores the cursor representing the item held in hand.
int LoLEngine::olol_setMouseCursor(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_setMouseCursor(%p) (%d)", (const void *)script, stackPos(0));
	const bool useSpecialIcon = (stackPos(0) == 1);
	if (useSpecialIcon)
		setMouseCursorToIcon(133);
	else
		setMouseCursorToItemInHand();
	return 1;
}
// EMC opcode: speech control, talkie version only (returns 0 otherwise).
// stackPos(0) == -1 aborts running speech, -2 polls the speech state, any
// other value makes character stackPos(0) speak line stackPos(1) with mode
// stackPos(2).
int LoLEngine::olol_characterSays(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_characterSays(%p) (%d, %d, %d)", (const void *)script, stackPos(0), stackPos(1), stackPos(2));
	if (!_flags.isTalkie)
		return 0;
	switch (stackPos(0)) {
	case -1:
		snd_stopSpeech(true);
		return 1;
	case -2:
		return snd_updateCharacterSpeech();
	default:
		return characterSays(stackPos(0), stackPos(1), stackPos(2));
	}
}
// EMC opcode: queues speech id stackPos(0) (offset by 1000) for speaker
// stackPos(1); both values must be non-zero for the request to be stored.
int LoLEngine::olol_queueSpeech(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_queueSpeech(%p) (%d, %d)", (const void *)script, stackPos(0), stackPos(1));
if (stackPos(0) && stackPos(1)) {
_nextSpeechId = stackPos(0) + 1000;
_nextSpeaker = stackPos(1);
}
return 1;
}
// EMC opcode: price lookup. A negative argument is treated as an absolute
// price and rounded up to the next full hundred, with a floor of 50. A
// non-negative argument is matched against the ascending _itemCost table and
// the first entry >= the argument is returned (0 when none qualifies).
int LoLEngine::olol_getItemPrice(EMCState *script) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getItemPrice(%p) (%d)", (const void *)script, stackPos(0));
	const int price = stackPos(0);
	if (price >= 0) {
		for (int slot = 0; slot < 46; slot++) {
			if (_itemCost[slot] >= price)
				return _itemCost[slot];
		}
		return 0;
	}
	const int absPrice = -price;
	if (absPrice < 50)
		return 50;
	return ((absPrice + 99) / 100) * 100;
}
// EMC opcode: exposes the configured language index to the script.
int LoLEngine::olol_getLanguage(EMCState *script) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::olol_getLanguage(%p)", (const void *)script);
return _lang;
}
#pragma mark -
// TIM opcode: computes fade step parameters for palette 0 towards rate
// param[0] and resets the fade accumulator.
int LoLEngine::tlol_setupPaletteFade(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_setupPaletteFade(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
_screen->getFadeParams(_screen->getPalette(0), param[0], _tim->_palDelayInc, _tim->_palDiff);
_tim->_palDelayAcc = 0;
return 1;
}
// TIM opcode: loads the palette file named by TIM text entry param[0] into
// palette 0.
int LoLEngine::tlol_loadPalette(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_loadPalette(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
const char *palFile = (const char *)(tim->text + READ_LE_UINT16(tim->text + (param[0]<<1)));
_screen->loadPalette(palFile, _screen->getPalette(0));
return 1;
}
// TIM opcode: like tlol_setupPaletteFade, but first copies palette 1 into
// palette 0 so the fade starts from the backup palette.
int LoLEngine::tlol_setupPaletteFadeEx(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_setupPaletteFadeEx(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
_screen->copyPalette(0, 1);
_screen->getFadeParams(_screen->getPalette(0), param[0], _tim->_palDelayInc, _tim->_palDiff);
_tim->_palDelayAcc = 0;
return 1;
}
// TIM opcode: renders WSA frame param[1] of animation param[0] to page 2 and
// blits it scaled by param[4] percent towards target position
// (param[2], param[3]), stepping from the animation's own origin/size.
int LoLEngine::tlol_processWsaFrame(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_processWsaFrame(%p, %p) (%d, %d, %d, %d, %d)",
(const void *)tim, (const void *)param, param[0], param[1], param[2], param[3], param[4]);
const int animIndex = tim->wsa[param[0]].anim - 1;
const int frame = param[1];
const int x2 = param[2];
const int y2 = param[3];
// Scale factor in percent; negative script values are clamped to 0.
const int factor = MAX<int>(0, (int16)param[4]);
const int x1 = _animator->getAnimX(animIndex);
const int y1 = _animator->getAnimY(animIndex);
const Movie *wsa = _animator->getWsaCPtr(animIndex);
int w1 = wsa->width();
int h1 = wsa->height();
int w2 = (w1 * factor) / 100;
int h2 = (h1 * factor) / 100;
_animator->displayFrame(animIndex, 2, frame);
_screen->wsaFrameAnimationStep(x1, y1, x2, y2, w1, h1, w2, h2, 2, _flags.isDemo && _flags.platform != Common::kPlatformPC98 ? 0 : 8, 0);
if (!_flags.isDemo && _flags.platform != Common::kPlatformPC98)
_screen->checkedPageUpdate(8, 4);
_screen->updateScreen();
return 1;
}
// TIM opcode: displays TIM text param[0]; the LoL outro variant takes an
// extra third parameter.
int LoLEngine::tlol_displayText(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_displayText(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], (int16)param[1]);
if (tim->isLoLOutro)
_tim->displayText(param[0], param[1], param[2]);
else
_tim->displayText(param[0], param[1]);
return 1;
}
// TIM opcode: sets up scene-window dialogue mode param[0].
int LoLEngine::tlol_initSceneWindowDialogue(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_initSceneWindowDialogue(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
initSceneWindowDialogue(param[0]);
return 1;
}
// TIM opcode: leaves scene-window dialogue mode param[0].
int LoLEngine::tlol_restoreAfterSceneWindowDialogue(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_restoreAfterSceneWindowDialogue(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
restoreAfterSceneWindowDialogue(param[0]);
return 1;
}
// TIM opcode: creates an item from (param[0..2]) and puts it into the party
// inventory; the item is deleted again when the inventory is full.
// Returns 1 on success, 0 when the item could not be added.
int LoLEngine::tlol_giveItem(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_giveItem(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
int item = makeItem(param[0], param[1], param[2]);
if (addItemToInventory(item))
return 1;
deleteItem(item);
return 0;
}
// TIM opcode: param[0] == 1 sets the party's facing direction to param[1];
// param[0] == 0 moves the party to block param[1], centering it at the
// block's midpoint (0x80, 0x80).
int LoLEngine::tlol_setPartyPosition(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_setPartyPosition(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], param[1]);
if (param[0] == 1) {
_currentDirection = param[1];
} else if (param[0] == 0) {
_currentBlock = param[1];
calcCoordinates(_partyPosX, _partyPosY, _currentBlock, 0x80, 0x80);
}
return 1;
}
// TIM opcode: palette fade dispatcher. Modes:
//  0 - fade out and clear the scene window
//  1 - fade to the backup palette (palette 1 in 16-color mode, otherwise a
//      half-copy via palette 3 with special colors loaded)
//  2 - fade to black
//  3 - fade to palette 3 (or 1 in 16-color mode) with special colors
//  4 - clear the scene window if needed, redraw the playfield and reapply
//      lamp/brightness to palette 0
//  5 - load special colors from palette 3, fade to palette 1
int LoLEngine::tlol_fadeClearWindow(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_fadeClearWindow(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
switch (param[0]) {
case 0:
_screen->fadeClearSceneWindow(10);
break;
case 1:
if (_flags.use16ColorMode) {
_screen->fadePalette(_screen->getPalette(1), 10);
} else {
_screen->getPalette(3).copy(_screen->getPalette(0), 128);
_screen->loadSpecialColors(_screen->getPalette(3));
_screen->fadePalette(_screen->getPalette(3), 10);
}
_screen->_fadeFlag = 0;
break;
case 2:
_screen->fadeToBlack(10);
break;
case 3:
_screen->loadSpecialColors(_screen->getPalette(3));
_screen->fadePalette(_screen->getPalette(_flags.use16ColorMode ? 1 : 3), 10);
_screen->_fadeFlag = 0;
break;
case 4:
if (_screen->_fadeFlag != 2)
_screen->fadeClearSceneWindow(10);
gui_drawPlayField();
setPaletteBrightness(_screen->getPalette(0), _brightness, _lampEffect);
_screen->_fadeFlag = 0;
break;
case 5:
_screen->loadSpecialColors(_screen->getPalette(3));
_screen->fadePalette(_screen->getPalette(1), 10);
_screen->_fadeFlag = 0;
break;
default:
break;
}
return 1;
}
// TIM opcode: copies a screen region between pages (src x/y, dst x/y, w, h,
// src page, dst page); updates the screen when the destination is page 0.
int LoLEngine::tlol_copyRegion(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_copyRegion(%p, %p) (%d, %d, %d, %d, %d, %d, %d, %d)", (const void *)tim, (const void *)param, param[0], param[1], param[2], param[3], param[4], param[5], param[6], param[7]);
_screen->copyRegion(param[0], param[1], param[2], param[3], param[4], param[5], param[6], param[7], Screen::CR_NO_P_CHECK);
if (!param[7])
_screen->updateScreen();
return 1;
}
// TIM opcode: character param[0] speaks lang string param[2] with voice
// mode param[1] through the scripted chat helper.
int LoLEngine::tlol_characterChat(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_characterChat(%p, %p) (%d, %d, %d)", (const void *)tim, (const void *)param, param[0], param[1], param[2]);
playCharacterScriptChat(param[0], param[1], 1, getLangString(param[2]), 0, param, 3);
return 1;
}
// TIM opcode: redraws the scene to page param[0].
int LoLEngine::tlol_drawScene(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_drawScene(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
gui_drawScene(param[0]);
//if (_sceneDrawPage2 != 2 && param[0] == 2)
// _screen->copyRegion(112 << 3, 0, 112 << 3, 0, 176 << 3, 120, _sceneDrawPage2, 2, Screen::CR_NO_P_CHECK);
return 1;
}
// TIM opcode: runs one engine update tick.
int LoLEngine::tlol_update(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_update(%p, %p)", (const void *)tim, (const void *)param);
update();
return 1;
}
// TIM opcode: wipes the dialogue text field (screen dim 5 / text dim 4) with
// its background color and resets the text cursor. Skipped while in a control
// mode with text display disabled.
int LoLEngine::tlol_clearTextField(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_clearTextField(%p, %p)", (const void *)tim, (const void *)param);
if (_currentControlMode && !textEnabled())
return 1;
_screen->setScreenDim(5);
const ScreenDim *d = _screen->_curDim;
_screen->fillRect(d->sx, d->sy, d->sx + d->w - (_flags.use16ColorMode ? 3 : 2), d->sy + d->h - 2, d->unkA);
_txt->clearDim(4);
_txt->resetDimTextPositions(4);
return 1;
}
// TIM opcode: loads sound file param[0].
int LoLEngine::tlol_loadSoundFile(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_loadSoundFile(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
snd_loadSoundFile(param[0]);
return 1;
}
// TIM opcode: starts music track param[0].
int LoLEngine::tlol_playMusicTrack(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_playMusicTrack(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
snd_playTrack(param[0]);
return 1;
}
// TIM opcode: plays speech for track param[0]; prints the text when speech
// failed to start or subtitles are enabled.
int LoLEngine::tlol_playDialogueTalkText(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_playDialogueTalkText(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
if (!snd_playCharacterSpeech(param[0], 0, 0) || textEnabled())
_txt->printDialogueText(4, getLangString(param[0]), 0, param, 1);
return 1;
}
// TIM opcode: plays sound effect param[0].
int LoLEngine::tlol_playSoundEffect(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_playSoundEffect(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
snd_playSoundEffect(param[0], -1);
return 1;
}
// TIM opcode: starts background animation param[0] with parameter param[1].
int LoLEngine::tlol_startBackgroundAnimation(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_startBackgroundAnimation(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], param[1]);
_animator->start(param[0], param[1]);
return 1;
}
// TIM opcode: stops background animation param[0].
int LoLEngine::tlol_stopBackgroundAnimation(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_stopBackgroundAnimation(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
_animator->stop(param[0]);
return 1;
}
// TIM opcode: fades in a scene background. Loads "<scene>.CPS" to page 5 (and
// its palette); in 256-color mode it additionally blends in via a
// language-dependent overlay file over three timed passes, otherwise it just
// fades the palette. Both TIM text indices (param[0] scene, param[1] overlay)
// name entries in the script's string table.
int LoLEngine::tlol_fadeInScene(const TIM *tim, const uint16 *param) {
debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_fadeInScene(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], param[1]);
const char *sceneFile = (const char *)(tim->text + READ_LE_UINT16(tim->text + (param[0]<<1)));
const char *overlayFile = (const char *)(tim->text + READ_LE_UINT16(tim->text + (param[1]<<1)));
_screen->copyRegion(0, 0, 0, 0, 320, 200, 0, 2, Screen::CR_NO_P_CHECK);
char filename[32];
strcpy(filename, sceneFile);
strcat(filename, ".CPS");
_screen->loadBitmap(filename, 7, 5, &_screen->getPalette(0));
uint8 *overlay = 0;
if (!_flags.use16ColorMode) {
// Talkie versions keep the overlay in a per-language subdirectory.
filename[0] = 0;
if (_flags.isTalkie) {
strcpy(filename, _languageExt[_lang]);
strcat(filename, "/");
}
strcat(filename, overlayFile);
overlay = _res->fileData(filename, 0);
for (int i = 0; i < 3; ++i) {
uint32 endTime = _system->getMillis() + 10 * _tickLength;
_screen->copyBlockAndApplyOverlayOutro(4, 2, overlay);
_screen->copyRegion(0, 0, 0, 0, 320, 200, 2, 0, Screen::CR_NO_P_CHECK);
_screen->updateScreen();
delayUntil(endTime);
}
}
_screen->copyRegion(0, 0, 0, 0, 320, 200, 4, 0, Screen::CR_NO_P_CHECK);
if (_flags.use16ColorMode) {
_screen->fadePalette(_screen->getPalette(0), 5);
} else {
_screen->updateScreen();
delete[] overlay;
}
return 1;
}
// TIM opcode stub: intentionally a no-op (see comment below). Kept so the
// opcode table slots stay aligned with the original interpreter.
int LoLEngine::tlol_unusedResourceFunc(const TIM *tim, const uint16 *param) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_unusedResourceFunc(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
	// The original used 0x6 / 0x7 for some resource caching, we don't need this.
	return 1;
}
// TIM opcode: load a bitmap and fade the screen palette to its palette.
// param[0] indexes the bitmap file name in the TIM text chunk, param[1] is
// the fade delay forwarded to Screen::fadePalette().
int LoLEngine::tlol_fadeInPalette(const TIM *tim, const uint16 *param) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_fadeInPalette(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], param[1]);
	const char *bitmap = (const char *)(tim->text + READ_LE_UINT16(tim->text + (param[0]<<1)));
	Palette pal(_screen->getPalette(0).getNumColors());
	_screen->loadBitmap(bitmap, 3, 3, &pal);
	// 16-color mode: blank the active palette and show the new image first,
	// then fade in via the palette below.
	if (_flags.use16ColorMode) {
		_screen->getPalette(0).clear();
		_screen->setScreenPalette(_screen->getPalette(0));
		_screen->copyPage(2, 0);
	}
	_screen->fadePalette(pal, param[1]);
	return 1;
}
// TIM opcode: begin fading out all sound output. param[0] is only logged;
// it is not used by the implementation.
int LoLEngine::tlol_fadeOutSound(const TIM *tim, const uint16 *param) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_fadeOutSound(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
	_sound->beginFadeOut();
	return 1;
}
// TIM opcode: display frame param[1] of the WSA animation attached to TIM
// animation slot param[0]. The special frame value 0xFFFF copies between
// pages 0 and 2 instead of drawing a frame (presumably refreshing the
// backup of the current screen -- confirm copyRegion src/dst page order).
int LoLEngine::tlol_displayAnimFrame(const TIM *tim, const uint16 *param) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_displayAnimFrame(%p, %p) (%d, %d)", (const void *)tim, (const void *)param, param[0], param[1]);
	const int animIndex = tim->wsa[param[0]].anim - 1;
	const Movie *wsa = _animator->getWsaCPtr(animIndex);
	if (param[1] == 0xFFFF) {
		_screen->copyRegion(0, 0, 0, 0, 320, 200, 0, 2, Screen::CR_NO_P_CHECK);
	} else {
		// Render the frame to page 2, then copy only the WSA's rectangle
		// to the visible page.
		_animator->displayFrame(animIndex, 2, param[1], 0);
		_screen->copyRegion(wsa->xAdd(), wsa->yAdd(), wsa->xAdd(), wsa->yAdd(), wsa->width(), wsa->height(), 2, 0);
	}
	return 1;
}
// TIM opcode: pause for param[0] (units as used by LoLEngine::delay) so the
// player can read on-screen dialogue. Skipped when speech is enabled, since
// the voice line paces the dialogue instead.
int LoLEngine::tlol_delayForChat(const TIM *tim, const uint16 *param) {
	debugC(3, kDebugLevelScriptFuncs, "LoLEngine::tlol_delayForChat(%p, %p) (%d)", (const void *)tim, (const void *)param, param[0]);
	if (!speechEnabled())
		delay(param[0]);
	return 1;
}
#pragma mark -
// Helper typedefs and macros for registering the EMC and TIM opcode tables
// built in LoLEngine::setupOpcodeTable() below. An unimplemented slot is a
// null functor, which keeps the opcode numbering aligned with the original
// interpreter.
typedef Common::Functor1Mem<EMCState *, int, LoLEngine> OpcodeV2;
#define SetOpcodeTable(x) table = &x;
#define Opcode(x) table->push_back(new OpcodeV2(this, &LoLEngine::x))
#define OpcodeUnImpl() table->push_back(new OpcodeV2(this, 0))
typedef Common::Functor2Mem<const TIM *, const uint16 *, int, LoLEngine> TIMOpcodeLoL;
#define SetTimOpcodeTable(x) timTable = &x;
#define OpcodeTim(x) timTable->push_back(new TIMOpcodeLoL(this, &LoLEngine::x))
#define OpcodeTimUnImpl() timTable->push_back(new TIMOpcodeLoL(this, 0))
// Registers all script opcode handlers: the EMC (game script) opcode table
// and the three TIM opcode tables (intro, outro, ingame).
// IMPORTANT: entries are positional -- the index of each entry is the opcode
// number the scripts use, so the order below must never change. The "// 0xNN"
// comments give the opcode number of the first entry in each group of four.
void LoLEngine::setupOpcodeTable() {
	Common::Array<const Opcode *> *table = 0;
	// EMC (game script) opcodes.
	_opcodes.reserve(192);
	SetOpcodeTable(_opcodes);
	// 0x00
	Opcode(olol_setWallType);
	Opcode(olol_getWallType);
	Opcode(olol_drawScene);
	Opcode(olol_rollDice);
	// 0x04
	Opcode(olol_moveParty);
	OpcodeUnImpl();
	Opcode(olol_delay);
	Opcode(olol_setGameFlag);
	// 0x08
	Opcode(olol_testGameFlag);
	Opcode(olol_loadLevelGraphics);
	Opcode(olol_loadBlockProperties);
	Opcode(olol_loadMonsterShapes);
	// 0x0C
	Opcode(olol_deleteHandItem);
	Opcode(olol_allocItemPropertiesBuffer);
	Opcode(olol_setItemProperty);
	Opcode(olol_makeItem);
	// 0x10
	Opcode(olol_placeMoveLevelItem);
	Opcode(olol_createLevelItem);
	Opcode(olol_getItemPara);
	Opcode(olol_getCharacterStat);
	// 0x14
	Opcode(olol_setCharacterStat);
	Opcode(olol_loadLevelShapes);
	Opcode(olol_closeLevelShapeFile);
	OpcodeUnImpl();
	// 0x18
	Opcode(olol_loadDoorShapes);
	Opcode(olol_initAnimStruct);
	Opcode(olol_playAnimationPart);
	Opcode(olol_freeAnimStruct);
	// 0x1C
	Opcode(olol_getDirection);
	Opcode(olol_characterSurpriseFeedback);
	Opcode(olol_setMusicTrack);
	Opcode(olol_setSequenceButtons);
	// 0x20
	Opcode(olol_setDefaultButtonState);
	Opcode(olol_checkRectForMousePointer);
	Opcode(olol_clearDialogueField);
	Opcode(olol_setupBackgroundAnimationPart);
	// 0x24
	Opcode(olol_startBackgroundAnimation);
	Opcode(o1_hideMouse);
	Opcode(o1_showMouse);
	Opcode(olol_fadeToBlack);
	// 0x28
	Opcode(olol_fadePalette);
	Opcode(olol_loadBitmap);
	Opcode(olol_stopBackgroundAnimation);
	OpcodeUnImpl();
	// 0x2C
	OpcodeUnImpl();
	Opcode(olol_getGlobalScriptVar);
	Opcode(olol_setGlobalScriptVar);
	Opcode(olol_getGlobalVar);
	// 0x30
	Opcode(olol_setGlobalVar);
	Opcode(olol_triggerDoorSwitch);
	Opcode(olol_checkEquippedItemScriptFlags);
	Opcode(olol_setDoorState);
	// 0x34
	Opcode(olol_updateBlockAnimations);
	Opcode(olol_mapShapeToBlock);
	Opcode(olol_resetBlockShapeAssignment);
	Opcode(olol_copyRegion);
	// 0x38
	Opcode(olol_initMonster);
	Opcode(olol_fadeClearSceneWindow);
	Opcode(olol_fadeSequencePalette);
	Opcode(olol_redrawPlayfield);
	// 0x3C
	Opcode(olol_loadNewLevel);
	Opcode(olol_getNearestMonsterFromCharacter);
	Opcode(olol_dummy0);
	Opcode(olol_loadMonsterProperties);
	// 0x40
	Opcode(olol_battleHitSkillTest);
	Opcode(olol_inflictDamage);
	OpcodeUnImpl();
	OpcodeUnImpl();
	// 0x44
	Opcode(olol_moveMonster);
	Opcode(olol_dialogueBox);
	Opcode(olol_giveTakeMoney);
	Opcode(olol_checkMoney);
	// 0x48
	Opcode(olol_setScriptTimer);
	Opcode(olol_createHandItem);
	Opcode(olol_playAttackSound);
	Opcode(olol_characterJoinsParty);
	// 0x4C
	Opcode(olol_giveItem);
	OpcodeUnImpl();
	Opcode(olol_loadTimScript);
	Opcode(olol_runTimScript);
	// 0x50
	Opcode(olol_releaseTimScript);
	Opcode(olol_initSceneWindowDialogue);
	Opcode(olol_restoreAfterSceneWindowDialogue);
	Opcode(olol_getItemInHand);
	// 0x54
	Opcode(olol_checkMagic);
	Opcode(olol_giveItemToMonster);
	Opcode(olol_loadLangFile);
	Opcode(olol_playSoundEffect);
	// 0x58
	Opcode(olol_processDialogue);
	Opcode(olol_stopTimScript);
	Opcode(olol_getWallFlags);
	Opcode(olol_changeMonsterStat);
	// 0x5C
	Opcode(olol_getMonsterStat);
	Opcode(olol_releaseMonsterShapes);
	Opcode(olol_playCharacterScriptChat);
	Opcode(olol_update);
	// 0x60
	Opcode(olol_playEnvironmentalSfx);
	Opcode(olol_healCharacter);
	Opcode(olol_drawExitButton);
	Opcode(olol_loadSoundFile);
	// 0x64
	Opcode(olol_playMusicTrack);
	Opcode(olol_deleteMonstersFromBlock);
	Opcode(olol_countBlockItems);
	Opcode(olol_characterSkillTest);
	// 0x68
	Opcode(olol_countAllMonsters);
	Opcode(olol_playEndSequence);
	Opcode(olol_stopPortraitSpeechAnim);
	Opcode(olol_setPaletteBrightness);
	// 0x6C
	Opcode(olol_calcInflictableDamage);
	Opcode(olol_getInflictedDamage);
	Opcode(olol_checkForCertainPartyMember);
	Opcode(olol_printMessage);
	// 0x70
	Opcode(olol_deleteLevelItem);
	Opcode(olol_calcInflictableDamagePerItem);
	Opcode(olol_distanceAttack);
	Opcode(olol_removeCharacterEffects);
	// 0x74
	Opcode(olol_checkInventoryFull);
	Opcode(olol_objectLeavesLevel);
	OpcodeUnImpl();
	OpcodeUnImpl();
	// 0x78
	Opcode(olol_addSpellToScroll);
	Opcode(olol_playDialogueText);
	Opcode(olol_playDialogueTalkText);
	Opcode(olol_checkMonsterTypeHostility);
	// 0x7C
	Opcode(olol_setNextFunc);
	Opcode(olol_dummy1);
	OpcodeUnImpl();
	Opcode(olol_suspendMonster);
	// 0x80
	Opcode(olol_setScriptTextParameter);
	Opcode(olol_triggerEventOnMouseButtonClick);
	Opcode(olol_printWindowText);
	Opcode(olol_countSpecificMonsters);
	// 0x84
	Opcode(olol_updateBlockAnimations2);
	Opcode(olol_checkPartyForItemType);
	Opcode(olol_blockDoor);
	Opcode(olol_resetTimDialogueState);
	// 0x88
	Opcode(olol_getItemOnPos);
	Opcode(olol_removeLevelItem);
	Opcode(olol_savePage5);
	Opcode(olol_restorePage5);
	// 0x8C
	Opcode(olol_initDialogueSequence);
	Opcode(olol_restoreAfterDialogueSequence);
	Opcode(olol_setSpecialSceneButtons);
	Opcode(olol_restoreButtonsAfterSpecialScene);
	// 0x90
	OpcodeUnImpl();
	OpcodeUnImpl();
	Opcode(olol_prepareSpecialScene);
	Opcode(olol_restoreAfterSpecialScene);
	// 0x94
	Opcode(olol_assignCustomSfx);
	OpcodeUnImpl();
	Opcode(olol_findAssignedMonster);
	Opcode(olol_checkBlockForMonster);
	// 0x98
	Opcode(olol_transformRegion);
	Opcode(olol_calcCoordinatesAddDirectionOffset);
	Opcode(olol_resetPortraitsAndDisableSysTimer);
	Opcode(olol_enableSysTimer);
	// 0x9C
	Opcode(olol_checkNeedSceneRestore);
	Opcode(olol_getNextActiveCharacter);
	Opcode(olol_paralyzePoisonCharacter);
	Opcode(olol_drawCharPortrait);
	// 0xA0
	Opcode(olol_removeInventoryItem);
	OpcodeUnImpl();
	OpcodeUnImpl();
	Opcode(olol_getAnimationLastPart);
	// 0xA4
	Opcode(olol_assignSpecialGuiShape);
	Opcode(olol_findInventoryItem);
	Opcode(olol_restoreFadePalette);
	Opcode(olol_calcNewBlockPosition);
	// 0xA8
	Opcode(olol_getSelectedCharacter);
	Opcode(olol_setHandItem);
	Opcode(olol_drinkBezelCup);
	Opcode(olol_changeItemTypeOrFlag);
	// 0xAC
	Opcode(olol_placeInventoryItemInHand);
	Opcode(olol_castSpell);
	Opcode(olol_pitDrop);
	Opcode(olol_increaseSkill);
	// 0xB0
	Opcode(olol_paletteFlash);
	Opcode(olol_restoreMagicShroud);
	Opcode(olol_dummy1); // anim buffer select?
	Opcode(olol_disableControls);
	// 0xB4
	Opcode(olol_enableControls);
	Opcode(olol_shakeScene);
	Opcode(olol_gasExplosion);
	Opcode(olol_calcNewBlockPosition);
	// 0xB8
	Opcode(olol_fadeScene);
	Opcode(olol_updateDrawPage2);
	Opcode(olol_setMouseCursor);
	Opcode(olol_characterSays);
	// 0xBC
	Opcode(olol_queueSpeech);
	Opcode(olol_getItemPrice);
	Opcode(olol_getLanguage);
	Opcode(olol_dummy0);
	// TIM opcode tables. Same positional rule applies.
	Common::Array<const TIMOpcode *> *timTable = 0;
	// TIM opcodes used by the intro scripts.
	_timIntroOpcodes.reserve(8);
	SetTimOpcodeTable(_timIntroOpcodes);
	// 0x00
	OpcodeTim(tlol_setupPaletteFade);
	OpcodeTimUnImpl();
	OpcodeTim(tlol_loadPalette);
	OpcodeTim(tlol_setupPaletteFadeEx);
	// 0x04
	OpcodeTim(tlol_processWsaFrame);
	OpcodeTim(tlol_displayText);
	OpcodeTimUnImpl();
	OpcodeTimUnImpl();
	// TIM opcodes used by the outro scripts.
	_timOutroOpcodes.reserve(16);
	SetTimOpcodeTable(_timOutroOpcodes);
	// 0x00
	OpcodeTim(tlol_setupPaletteFade);
	OpcodeTimUnImpl();
	OpcodeTim(tlol_loadPalette);
	OpcodeTim(tlol_setupPaletteFadeEx);
	// 0x04
	OpcodeTimUnImpl();
	OpcodeTim(tlol_fadeInScene);
	OpcodeTim(tlol_unusedResourceFunc);
	OpcodeTim(tlol_unusedResourceFunc);
	// 0x08
	OpcodeTim(tlol_fadeInPalette);
	OpcodeTimUnImpl();
	OpcodeTimUnImpl();
	OpcodeTim(tlol_fadeOutSound);
	// 0x0C
	OpcodeTim(tlol_displayAnimFrame);
	OpcodeTim(tlol_delayForChat);
	OpcodeTim(tlol_displayText);
	OpcodeTimUnImpl();
	// TIM opcodes used by the ingame scripts.
	_timIngameOpcodes.reserve(17);
	SetTimOpcodeTable(_timIngameOpcodes);
	// 0x00
	OpcodeTim(tlol_initSceneWindowDialogue);
	OpcodeTim(tlol_restoreAfterSceneWindowDialogue);
	OpcodeTimUnImpl();
	OpcodeTim(tlol_giveItem);
	// 0x04
	OpcodeTim(tlol_setPartyPosition);
	OpcodeTim(tlol_fadeClearWindow);
	OpcodeTim(tlol_copyRegion);
	OpcodeTim(tlol_characterChat);
	// 0x08
	OpcodeTim(tlol_drawScene);
	OpcodeTim(tlol_update);
	OpcodeTim(tlol_clearTextField);
	OpcodeTim(tlol_loadSoundFile);
	// 0x0C
	OpcodeTim(tlol_playMusicTrack);
	OpcodeTim(tlol_playDialogueTalkText);
	OpcodeTim(tlol_playSoundEffect);
	OpcodeTim(tlol_startBackgroundAnimation);
	// 0x10
	OpcodeTim(tlol_stopBackgroundAnimation);
}
} // End of namespace Kyra
#endif // ENABLE_LOL<|fim▁end|>
|
}
int LoLEngine::olol_getInflictedDamage(EMCState *script) {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright (c) Pymatgen Development Team.<|fim▁hole|># Distributed under the terms of the MIT License.
__author__ = 'Anubhav Jain'
__copyright__ = 'Copyright 2014, The Materials Project'
__version__ = '0.1'
__maintainer__ = 'Anubhav Jain'
__email__ = '[email protected]'
__date__ = 'Oct 03, 2014'<|fim▁end|>
| |
<|file_name|>person_mng.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from datetime import datetime
from dateutil.relativedelta import relativedelta
from openerp import api, fields, models
from openerp.exceptions import UserError
class PersonManagement(models.Model):
_name = 'myo.person.mng'
name = fields.Char('Name', required=True)
alias = fields.Char('Alias', help='Common name that the Person is referred.')
code = fields.Char(string='Person Code', required=False)
notes = fields.Text(string='Notes')
date_inclusion = fields.Datetime("Inclusion Date", required=False, readonly=False,
default=lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
batch_name = fields.Char('Batch Name', required=False)
country_id_2 = fields.Many2one('res.country', 'Nationality')
birthday = fields.Date("Date of Birth")
age = fields.Char(
string='Age',
compute='_compute_age',
store=True
)
estimated_age = fields.Char(string='Estimated Age', required=False)
spouse_name = fields.Char('Spouse Name')
spouse_id = fields.Many2one('myo.person', 'Spouse', ondelete='restrict')
father_name = fields.Char('Father Name')
father_id = fields.Many2one('myo.person', 'Father', ondelete='restrict')
mother_name = fields.Char('Mother Name')
mother_id = fields.Many2one('myo.person', 'Mother', ondelete='restrict')
responsible_name = fields.Char('Responsible Name')
responsible_id = fields.Many2one('myo.person', 'Responsible', ondelete='restrict')
identification_id = fields.Char('Person ID')
otherid = fields.Char('Other ID')
gender = fields.Selection(
[('M', 'Male'),
('F', 'Female')
], 'Gender'
)
marital = fields.Selection(
[('single', 'Single'),
('married', 'Married'),
('widower', 'Widower'),
('divorced', 'Divorced'),
], 'Marital Status'
)
active = fields.Boolean('Active',
help="If unchecked, it will allow you to hide the person without removing it.",
default=1)
person_id = fields.Many2one('myo.person', 'Person')
_order = 'name'
_sql_constraints = [
('code_uniq',
'UNIQUE(code)',<|fim▁hole|> u'Error! The Person Code must be unique!'
)
]
@api.multi
@api.constrains('birthday')
def _check_birthday(self):
for person in self:
if person.birthday > fields.Date.today():
raise UserError(u'Error! Date of Birth must be in the past!')
@api.one
@api.depends('birthday')
def _compute_age(self):
now = datetime.now()
if self.birthday:
dob = datetime.strptime(self.birthday, '%Y-%m-%d')
delta = relativedelta(now, dob)
# self.age = str(delta.years) + "y " + str(delta.months) + "m " + str(delta.days) + "d"
self.age = str(delta.years)
else:
self.age = "No Date of Birth!"<|fim▁end|>
| |
<|file_name|>CIA.py<|end_file_name|><|fim▁begin|>from ImportDependence import *
from CustomClass import *
class CIA(AppForm):
useddf=pd.DataFrame()
Lines = []
Tags = []
description = 'Chemical Index of Alteration'
unuseful = ['Name',
'Mineral',
'Author',
'DataType',
'Label',
'Marker',
'Color',
'Size',
'Alpha',
'Style',
'Width',
'Tag']
reference = '''
CIA = [Al2O3/(Al2O3+CaO*+Na2O+K2O]×100
ICV = (Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
PIA = {(Al2O3-K2O)/[(Al2O3-K2O)+CaO*+Na2O]}×100
CIW = [Al2O3/(Al2O3+CaO*+Na2O)]×100
CIW' = [Al2O3/(Al2O3+Na2O)]×100
where CaO* is the amount of CaO incorporated in the silicate fraction of the rock.
CaO* = CaO - (10/3 * P2O5)
if CaO* < Na2O:
CaO* = CaO*
else:
CaO* = Na2O
References:
Nesbitt-CIA-1982
Harnois-CIW-1988
Mclennan-CIA-1993
Cox R-ICV-1995
Fedo-PIA-1995
Cullers-CIW'-2000
Song B W-2013
Cox R, Lowe D R, Cullers R L. The influence of sediment recycling and basement composition on evolution of mudrock chemistry in the southwestern United States[J]. Geochimica Et Cosmochimica Acta, 1995, 59(14):2919-2940.
Harnois, L., 1988, The CIW index: A new chemical index of weathering: Sedimentary Geology, v. 55, p. 319–322. doi:10.1016/0037-0738(88)90137-6
Nesbitt, H.W., and Young, G.M., 1982, Early Proterozoic climates and plate motions inferred from major element chemistry of lutites: Nature, v. 299, p. 715–717. doi:10.1038/299715a0
'''
BaseMass = {'SiO2': 60.083,
'TiO2': 79.865,
'Al2O3': 101.960077,
'TFe2O3': 159.687,
'Fe2O3': 159.687,
'TFeO': 71.844,
'FeO': 71.844,
'MnO': 70.937044,
'MgO': 40.304,
'CaO': 56.077000000000005,
'Na2O': 61.978538560000004,
'K2O': 94.1956,
'P2O5': 141.942523996,
'CO2': 44.009,
'SO3': 80.057,
'FeO': 71.844,
'Fe3O4': 231.531,
'BaO': 153.326,
'SrO': 103.619,
'Cr2O3': 151.98919999999998,
}
def __init__(self, parent=None, df=pd.DataFrame()):
QMainWindow.__init__(self, parent)
self.setWindowTitle('Chemical Index of Alteration & Index of Compositional Variability')
self.items = []
self._df = df
self._df.reindex()
if (len(df) > 0):
self._changed = True
# print('DataFrame recieved to CIA')
self.raw = df
self.raw = self.CleanDataFile(df)
self.rawitems = self.raw.columns.values.tolist()
for i in self.rawitems:
if i not in self.unuseful:
self.items.append(i)
else:
pass
self.create_main_frame()
self.create_status_bar()
def create_main_frame(self):
self.resize(800,600)
self.main_frame = QWidget()
self.dpi = 128
self.setWindowTitle('Chemical Index of Alteration & Index of Compositional Variability')
self.tableView = CustomQTableView(self.main_frame)
self.tableView.setObjectName('tableView')
self.tableView.setSortingEnabled(True)
self.textbox = GrowingTextEdit(self)
self.textbox.setText(self.reference)
# Other GUI controls
self.save_button = QPushButton('&Save')
self.save_button.clicked.connect(self.saveDataFile)
#
# Layout with box sizers
#
self.hbox = QHBoxLayout()
for w in [self.save_button]:
self.hbox.addWidget(w)
self.hbox.setAlignment(w, Qt.AlignVCenter)
self.vbox = QVBoxLayout()
self.vbox.addWidget(self.tableView)
#self.vbox.addWidget(self.tableView)
self.vbox.addLayout(self.hbox)
self.vbox.addWidget(self.textbox)
self.main_frame.setLayout(self.vbox)
self.setCentralWidget(self.main_frame)
def Read(self, inpoints):
points = []
for i in inpoints:
points.append(i.split())
result = []
for i in points:
for l in range(len(i)):
a = float((i[l].split(','))[0])
a = a * self.x_scale
b = float((i[l].split(','))[1])
b = (self.height_load - b) * self.y_scale
result.append((a, b))
return (result)
def CIA(self):
self.WholeData = []
dataframe=pd.DataFrame()
dataframe = self._df
#dataframe.set_index('Label')
ItemsAvalibale = dataframe.columns.values.tolist()
Indexes = dataframe.index.values.tolist()
#ItemsToCheck = ['Label','SiO2','Al2O3','Fe2O3','MgO','CaO','Na2O','K2O','P2O5','MnO','TiO2']
ItemsToTest = ['Number', 'Tag', 'Name', 'Author', 'DataType', 'Marker', 'Color', 'Size', 'Alpha',
'Style', 'Width']
for i in ItemsAvalibale:
if 'O' not in i and i !='Label':
dataframe = dataframe.drop(i, 1)
WholeItemsAvalibale = dataframe.columns.values.tolist()
ItemsAvalibale = dataframe.columns.values.tolist()
Indexes = dataframe.index.values.tolist()
if 'Whole' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('Whole')
if 'CIA' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIA')
if 'ICV' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('ICV')
if 'PIA' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('PIA')
if 'CIW' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIW')
if 'CIW\'' not in WholeItemsAvalibale:
WholeItemsAvalibale.append('CIW\'')
print('index',Indexes,'\ncolums',WholeItemsAvalibale)
WholeMole=[]
WholeList=[]
dataframe = dataframe.dropna(axis=1,how='all')
print(dataframe)
for j in Indexes:
tmpList=[]
tmpMoleSum=0
tmpcia=0
tmpAl2O3=0
tmpCaO=0
tmpNa2O=0
tmpK2O=0
tmpP2O5=0
tmpFe2O3=0
tmpMgO=0
tmpMnO=0
tmpTiO2=0
#ICV =(Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
for i in ItemsAvalibale:
if i in self.BaseMass:
m=dataframe.at[j,i]
n=self.BaseMass[i]
#print('\nm & n is \t',m,n)
tmpmole= m/n
#print(tmpmole)
tmpMoleSum = tmpMoleSum + tmpmole
#tmpList.append(dataframe.at[i,j])
#print('\n total mole is',tmpMoleSum)
for i in ItemsAvalibale:
if i in self.BaseMass:
tmpdata= 100*(dataframe.at[j,i]/self.BaseMass[i])/tmpMoleSum
tmpList.append(tmpdata)
#print(i, tmpdata)
if i =='Al2O3':
tmpAl2O3=tmpdata<|fim▁hole|> tmpCaO=tmpdata
elif i =='Na2O':
tmpNa2O = tmpdata
elif i =='K2O':
tmpK2O=tmpdata
elif i =='P2O5':
tmpP2O5=tmpdata
elif i =='Fe2O3':
tmpFe2O3=tmpdata
elif i == 'MgO':
tmpMgO = tmpdata
elif i == 'MnO':
tmpMnO = tmpdata
elif i == 'TiO2':
tmpTiO2 = tmpdata
elif i == 'Label' :
tmpdata = dataframe.at[j,i]
tmpList.append(tmpdata)
elif i in WholeItemsAvalibale:
del WholeItemsAvalibale[WholeItemsAvalibale.index(i)]
tmpList.append(tmpMoleSum)
usedCaO=0
middleCaO= tmpCaO-(10/3.0*tmpP2O5)
if middleCaO< tmpNa2O:
usedCaO=middleCaO
else:
usedCaO=tmpNa2O
#print(tmpAl2O3, usedCaO, tmpK2O, tmpNa2O)
CIA=tmpAl2O3/(tmpAl2O3+usedCaO+tmpNa2O+tmpK2O)*100
tmpList.append(CIA)
ICV =(tmpFe2O3+tmpK2O+tmpNa2O+usedCaO+tmpMgO+tmpMnO+tmpTiO2)/tmpAl2O3 #(Cox,1995)
tmpList.append(ICV)
PIA = ((tmpAl2O3-tmpK2O)/(tmpAl2O3-tmpK2O+usedCaO+tmpNa2O))*100
tmpList.append(PIA)
CIW = (tmpAl2O3/(tmpAl2O3+usedCaO+tmpNa2O))*100
tmpList.append(CIW)
CIW2 = (tmpAl2O3/(tmpAl2O3+tmpNa2O))*100
tmpList.append(CIW2)
'''
CIA = [Al2O3/(Al2O3+CaO*+Na2O+K2O]×100
ICV = (Fe2O3+K2O+Na2O+CaO*+MgO+MnO+TiO2)/Al2O3 (Cox,1995)
PIA = {(Al2O3-K2O)/[(Al2O3-K2O)+CaO*+Na2O]}×100
CIW = [Al2O3/(Al2O3+CaO*+Na2O)]×100
CIW' = [Al2O3/(Al2O3+Na2O)]×100
'''
#print(len(tmpList))
WholeList.append(tmpList)
pass
print(len(WholeList))
print(len(WholeItemsAvalibale))
df = pd.DataFrame(WholeList,columns=WholeItemsAvalibale)
self.useddf = df
self.tableView.setModel(PandasModel(self.useddf))
self.show()
def saveDataFile(self):
# if self.model._changed == True:
# print('changed')
# print(self.model._df)
DataFileOutput, ok2 = QFileDialog.getSaveFileName(self,
'文件保存',
'C:/',
'Excel Files (*.xlsx);;CSV Files (*.csv)') # 数据文件保存输出
if (DataFileOutput != ''):
if ('csv' in DataFileOutput):
self.useddf.to_csv(DataFileOutput, sep=',', encoding='utf-8')
elif ('xls' in DataFileOutput):
self.useddf.to_excel(DataFileOutput, encoding='utf-8')<|fim▁end|>
|
elif i =='CaO':
|
<|file_name|>ausrq.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core'<|fim▁hole|> template: `
<span [attr.aria-label]="msg" [hidden]="false">This is a dummy component for Ausrq</span>
<div (click)="doNothing($event)"></div>
`,
})
export class Ausrq {
msg: string = 'nothing to say';
doNothing(evt: any) {}
}<|fim▁end|>
|
@Component({
selector: 'd-ausrq',
|
<|file_name|>pub_use_xcrate1.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct Foo {
name: int<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>topThick.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# The file is part of the WRL Project.
#
# The WRL Project is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2017, Andrew McConachie, <[email protected]>
import os
import sys
import random
import dns.resolver
numTestDomains = 100
numTopTLDs = 100
ignoreDomains = ['com', 'net', 'jobs', 'cat', 'mil', 'edu', 'gov', 'int', 'arpa']
serverZone = '.ws.sp.am' # DNS Zone containing CNAME records pointing to whois FQDNs
def dbg(s):
# print s
pass
random.seed()
zFiles = os.listdir('zonefiles/')
#dbgFiles = 10 # How many files to read while developing this, remove when finished coding
tlds = []
for zf in zFiles:
# if len(tlds) >= dbgFiles: # For developing, remove when finished coding
# break
dbg(zf)
tld = {}
if zf.find(".txt") == -1:
dbg("This should not happen")
continue
zfh = open('zonefiles/' + zf, 'r')
lines = zfh.read().splitlines()
zfh.close()
dbg("after file read")
tld['name'] = lines[0].split(".")[0].strip()
if tld['name'] in ignoreDomains:
dbg("Ignoring:" + tld['name'])
continue
dbg("after name split")
rrs = []
for line in lines:
rr = line.split("\t")
rrs.append(rr)
dbg("after rr split")
ns = []
for rr in rrs:
if rr[3].lower() == 'ns':
ns.append(rr[0].split(".")[0])
dbg("after counting NS records")
if len(ns) < numTestDomains:
continue
else:
tld['size'] = len(ns)
tld['domains'] = random.sample(ns, numTestDomains)
for d in tld['domains']:
dbg(d + "." + tld['name'])
dbg(tld['name'] + ": " + str(tld['size']))
tlds.append(tld)
tlds.sort(key=lambda tld: tld['size'], reverse=True)
for ii in xrange(numTopTLDs):
# Find FQDN of whois server
d = dns.resolver.Resolver()
try:
resp = d.query(tlds[ii]['name'] + serverZone, 'CNAME')
if len(resp.rrset) < 1:
whois = 'UNKNOWN'
else:
whois = str(resp.rrset[0]).strip('.')
except:
whois = 'UNKNOWN'
s = whois + ','
for dom in tlds[ii]['domains']:
s += dom + '.' + tlds[ii]['name'] + ','
print s.strip(',')<|fim▁end|>
|
#
# The WRL Project is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
<|file_name|>HTMLMediaSource.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013 Google Inc. All rights reserved.
*<|fim▁hole|> * modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "HTMLMediaSource.h"
namespace WebCore {
URLRegistry* HTMLMediaSource::s_registry = 0;
void HTMLMediaSource::setRegistry(URLRegistry* registry)
{
ASSERT(!s_registry);
s_registry = registry;
}
}<|fim▁end|>
|
* Redistribution and use in source and binary forms, with or without
|
<|file_name|>test_lrs.py<|end_file_name|><|fim▁begin|>import requests
LRS = "http://cygnus.ic.uva.nl:8000/XAPI/statements"
u = raw_input("LRS username: ")<|fim▁hole|>r = requests.get(LRS,headers={"X-Experience-API-Version":"1.0"},auth=(u,p));
if r.status_code == 200:
print "Success"
else:
print "Server returns",r.status_code<|fim▁end|>
|
p = raw_input("LRS password: ")
|
<|file_name|>reflector.d.ts<|end_file_name|><|fim▁begin|>import { Type } from 'angular2/src/facade/lang';
import { SetterFn, GetterFn, MethodFn } from './types';
import { PlatformReflectionCapabilities } from './platform_reflection_capabilities';
export { SetterFn, GetterFn, MethodFn } from './types';
export { PlatformReflectionCapabilities } from './platform_reflection_capabilities';
/**
<|fim▁hole|> parameters: any[][];
factory: Function;
interfaces: any[];
propMetadata: {
[key: string]: any[];
};
constructor(annotations?: any[], parameters?: any[][], factory?: Function, interfaces?: any[], propMetadata?: {
[key: string]: any[];
});
}
/**
* Provides access to reflection data about symbols. Used internally by Angular
* to power dependency injection and compilation.
*/
export declare class Reflector {
reflectionCapabilities: PlatformReflectionCapabilities;
constructor(reflectionCapabilities: PlatformReflectionCapabilities);
isReflectionEnabled(): boolean;
/**
* Causes `this` reflector to track keys used to access
* {@link ReflectionInfo} objects.
*/
trackUsage(): void;
/**
* Lists types for which reflection information was not requested since
* {@link #trackUsage} was called. This list could later be audited as
* potential dead code.
*/
listUnusedKeys(): any[];
registerFunction(func: Function, funcInfo: ReflectionInfo): void;
registerType(type: Type, typeInfo: ReflectionInfo): void;
registerGetters(getters: {
[key: string]: GetterFn;
}): void;
registerSetters(setters: {
[key: string]: SetterFn;
}): void;
registerMethods(methods: {
[key: string]: MethodFn;
}): void;
factory(type: Type): Function;
parameters(typeOrFunc: any): any[][];
annotations(typeOrFunc: any): any[];
propMetadata(typeOrFunc: any): {
[key: string]: any[];
};
interfaces(type: Type): any[];
getter(name: string): GetterFn;
setter(name: string): SetterFn;
method(name: string): MethodFn;
importUri(type: Type): string;
}<|fim▁end|>
|
* Reflective information about a symbol, including annotations, interfaces, and other metadata.
*/
export declare class ReflectionInfo {
annotations: any[];
|
<|file_name|>ReadCardTask.java<|end_file_name|><|fim▁begin|>/*
* This file is part of KITCard Reader.
* Ⓒ 2012 Philipp Kern <[email protected]>
*
* KITCard Reader is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* KITCard Reader is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with KITCard Reader. If not, see <http://www.gnu.org/licenses/>.
*/
package de.Ox539.kitcard.reader;
/**
* ReadCardTask: Read an NFC tag using the Wallet class asynchronously.
*
* This class provides the glue calling the Wallet class and passing
* the information back to the Android UI layer. Detailed error
* information is not provided yet.
*
* @author Philipp Kern <[email protected]>
*/
import de.Ox539.kitcard.reader.Wallet.ReadCardResult;
import android.nfc.Tag;
import android.nfc.tech.MifareClassic;
import android.os.AsyncTask;
import android.widget.Toast;
public class ReadCardTask extends AsyncTask<Tag, Integer, Pair<ReadCardResult, Wallet>> {
private ScanActivity mActivity;
public ReadCardTask(ScanActivity activity) {
super();
this.mActivity = activity;
}
protected Pair<ReadCardResult, Wallet> doInBackground(Tag... tags) {
MifareClassic card = null;
try {
card = MifareClassic.get(tags[0]);
} catch (NullPointerException e) {
/* Error while reading card. This problem occurs on HTC devices from the ONE series with Android Lollipop (status of June 2015)
* Try to repair the tag.
*/
card = MifareClassic.get(MifareUtils.repairTag(tags[0]));
}
if(card == null)
return new Pair<ReadCardResult, Wallet>(null, null);
final Wallet wallet = new Wallet(card);
final ReadCardResult result = wallet.readCard();
return new Pair<ReadCardResult, Wallet>(result, wallet);
}
protected void onPostExecute(Pair<ReadCardResult, Wallet> data) {
ReadCardResult result = data.getValue0();
if(result == ReadCardResult.FAILURE) {
// read failed
Toast.makeText(mActivity, mActivity.getResources().getString(R.string.kitcard_read_failed), Toast.LENGTH_LONG).show();
return;<|fim▁hole|> }
final Wallet wallet = data.getValue1();
mActivity.updateCardNumber(wallet.getCardNumber());
mActivity.updateBalance(wallet.getCurrentBalance());
mActivity.updateLastTransaction(wallet.getLastTransactionValue());
mActivity.updateCardIssuer(wallet.getCardIssuer());
mActivity.updateCardType(wallet.getCardType());
}
}<|fim▁end|>
|
} else if(result == ReadCardResult.OLD_STYLE_WALLET) {
// old-style wallet encountered
Toast.makeText(mActivity, mActivity.getResources().getString(R.string.kitcard_needs_reencode), Toast.LENGTH_LONG).show();
return;
|
<|file_name|>save.py<|end_file_name|><|fim▁begin|>import sublime, sublime_plugin
class SaveAllExistingFilesCommand(sublime_plugin.ApplicationCommand):
def run(self):
for w in sublime.windows():<|fim▁hole|> def _save_files_in_window(self, w):
for v in w.views():
self._save_existing_file_in_view(v)
def _save_existing_file_in_view(self, v):
if v.file_name() and v.is_dirty():
v.run_command("save")
r"""
append to file sublime plugin OR api
sublime save dirty file plugin stackoverflow
"""<|fim▁end|>
|
self._save_files_in_window(w)
|
<|file_name|>es.typed.conversions.int32.js<|end_file_name|><|fim▁begin|>import { DESCRIPTORS, LITTLE_ENDIAN } from '../helpers/constants';
if (DESCRIPTORS) QUnit.test('Int32 conversions', assert => {
const int32array = new Int32Array(1);
const uint8array = new Uint8Array(int32array.buffer);
const dataview = new DataView(int32array.buffer);
function viewFrom(it) {
return new DataView(new Uint8Array(it).buffer);
}
function toString(it) {
return it === 0 && 1 / it === -Infinity ? '-0' : it;
}
const data = [
[0, 0, [0, 0, 0, 0]],
[-0, 0, [0, 0, 0, 0]],
[1, 1, [1, 0, 0, 0]],
[-1, -1, [255, 255, 255, 255]],
[1.1, 1, [1, 0, 0, 0]],
[-1.1, -1, [255, 255, 255, 255]],
[1.9, 1, [1, 0, 0, 0]],
[-1.9, -1, [255, 255, 255, 255]],
[127, 127, [127, 0, 0, 0]],
[-127, -127, [129, 255, 255, 255]],
[128, 128, [128, 0, 0, 0]],
[-128, -128, [128, 255, 255, 255]],
[255, 255, [255, 0, 0, 0]],
[-255, -255, [1, 255, 255, 255]],
[255.1, 255, [255, 0, 0, 0]],
[255.9, 255, [255, 0, 0, 0]],
[256, 256, [0, 1, 0, 0]],
[32767, 32767, [255, 127, 0, 0]],
[-32767, -32767, [1, 128, 255, 255]],
[32768, 32768, [0, 128, 0, 0]],
[-32768, -32768, [0, 128, 255, 255]],
[65535, 65535, [255, 255, 0, 0]],
[65536, 65536, [0, 0, 1, 0]],
[65537, 65537, [1, 0, 1, 0]],
[65536.54321, 65536, [0, 0, 1, 0]],
[-65536.54321, -65536, [0, 0, 255, 255]],
[2147483647, 2147483647, [255, 255, 255, 127]],
[-2147483647, -2147483647, [1, 0, 0, 128]],
[2147483648, -2147483648, [0, 0, 0, 128]],
[-2147483648, -2147483648, [0, 0, 0, 128]],
[2147483649, -2147483647, [1, 0, 0, 128]],
[-2147483649, 2147483647, [255, 255, 255, 127]],
[4294967295, -1, [255, 255, 255, 255]],
[4294967296, 0, [0, 0, 0, 0]],
[4294967297, 1, [1, 0, 0, 0]],
[9007199254740991, -1, [255, 255, 255, 255]],
[-9007199254740991, 1, [1, 0, 0, 0]],
[9007199254740992, 0, [0, 0, 0, 0]],
[-9007199254740992, 0, [0, 0, 0, 0]],
[9007199254740994, 2, [2, 0, 0, 0]],
[-9007199254740994, -2, [254, 255, 255, 255]],
[Infinity, 0, [0, 0, 0, 0]],
[-Infinity, 0, [0, 0, 0, 0]],
[-1.7976931348623157e+308, 0, [0, 0, 0, 0]],
[1.7976931348623157e+308, 0, [0, 0, 0, 0]],
[5e-324, 0, [0, 0, 0, 0]],<|fim▁hole|> for (const [value, conversion, little] of data) {
const big = little.slice().reverse();
const representation = LITTLE_ENDIAN ? little : big;
int32array[0] = value;
assert.same(int32array[0], conversion, `Int32Array ${ toString(value) } -> ${ toString(conversion) }`);
assert.arrayEqual(uint8array, representation, `Int32Array ${ toString(value) } -> [${ representation }]`);
dataview.setInt32(0, value);
assert.arrayEqual(uint8array, big, `dataview.setInt32(0, ${ toString(value) }) -> [${ big }]`);
assert.same(viewFrom(big).getInt32(0), conversion, `dataview{${ big }}.getInt32(0) -> ${ toString(conversion) }`);
dataview.setInt32(0, value, false);
assert.arrayEqual(uint8array, big, `dataview.setInt32(0, ${ toString(value) }, false) -> [${ big }]`);
assert.same(viewFrom(big).getInt32(0, false), conversion, `dataview{${ big }}.getInt32(0, false) -> ${ toString(conversion) }`);
dataview.setInt32(0, value, true);
assert.arrayEqual(uint8array, little, `dataview.setInt32(0, ${ toString(value) }, true) -> [${ little }]`);
assert.same(viewFrom(little).getInt32(0, true), conversion, `dataview{${ little }}.getInt32(0, true) -> ${ toString(conversion) }`);
}
});<|fim▁end|>
|
[-5e-324, 0, [0, 0, 0, 0]],
[NaN, 0, [0, 0, 0, 0]],
];
|
<|file_name|>console.rs<|end_file_name|><|fim▁begin|>use core;
use x86::io::outb;<|fim▁hole|>
use ::console::{Color, ConsoleCore};
const VGA_TEXT_BASE: *mut u16 = 0xB8000 as *mut u16;
// TODO: Read this from BIOS instead
const VGA_BASE_PORT: u16 = 0x3d4;
pub struct ConsoleCoreImpl {
x: usize,
y: usize,
fg: Color,
bg: Color,
show_cursor: bool,
}
impl ConsoleCoreImpl {
fn update_hw_cursor(&mut self) {
const CMD_CURSOR_LOW: u8 = 0x0f;
const CMD_CURSOR_HIGH: u8 = 0x0e;
if self.show_cursor {
let pos = self.y * self.width() + self.x;
unsafe {
outb(VGA_BASE_PORT, CMD_CURSOR_LOW);
outb(VGA_BASE_PORT + 1, pos as u8);
outb(VGA_BASE_PORT, CMD_CURSOR_HIGH);
outb(VGA_BASE_PORT + 1, (pos >> 8) as u8);
}
}
}
}
impl ConsoleCore for ConsoleCoreImpl {
fn new() -> Self {
ConsoleCoreImpl {
x: 0,
y: 0,
fg: Color::White,
bg: Color::Black,
show_cursor: true,
}
}
fn set_cursor(&mut self, x: usize, y: usize) -> bool {
if self.x >= self.width() || self.y >= self.height() {
return false;
}
self.x = x;
self.y = y;
self.update_hw_cursor();
true
}
fn set_color(&mut self, fg: Option<Color>, bg: Option<Color>) {
if let Some(c) = fg { self.fg = c }
if let Some(c) = bg { self.bg = c }
}
fn set_char(&mut self, ch: u8) {
let color = self.fg as u16 | ((self.bg as u16) << 4);
unsafe {
let addr = VGA_TEXT_BASE.offset((self.y * self.width() + self.x) as isize);
core::intrinsics::volatile_store(addr, ch as u16 | (color << 8));
}
}
fn show_cursor(&mut self, v: bool) {
self.show_cursor = v;
}
fn advance_cursor(&mut self) {
if self.x == self.width() - 1 {
self.advance_line();
} else {
self.x += 1;
self.update_hw_cursor();
}
}
fn advance_line(&mut self) {
let width = self.width();
let height = self.height();
assert!(height > 0);
if self.y == self.height() - 1 {
unsafe {
core::intrinsics::volatile_copy_memory(VGA_TEXT_BASE, VGA_TEXT_BASE.offset(width as isize), width * (height - 1));
}
let color = (self.bg as u16) << 4;
for x in 0..width {
unsafe {
let addr = VGA_TEXT_BASE.offset((width * (height - 1) + x) as isize);
core::intrinsics::volatile_store(addr, b' ' as u16 | (color << 8));
}
}
} else {
self.y += 1;
}
self.x = 0;
self.update_hw_cursor();
}
fn width(&self) -> usize {
80
}
fn height(&self) -> usize {
25
}
fn cursor(&self) -> (usize, usize) {
(self.x, self.y)
}
}<|fim▁end|>
| |
<|file_name|>sctp_unsupported.go<|end_file_name|><|fim▁begin|>// +build !linux linux,386<|fim▁hole|> "errors"
"net"
"runtime"
)
var ErrUnsupported = errors.New("SCTP is unsupported on " + runtime.GOOS + "/" + runtime.GOARCH)
func setsockopt(fd int, optname, optval, optlen uintptr) (uintptr, uintptr, error) {
return 0, 0, ErrUnsupported
}
func getsockopt(fd int, optname, optval, optlen uintptr) (uintptr, uintptr, error) {
return 0, 0, ErrUnsupported
}
func (c *SCTPConn) SCTPWrite(b []byte, info *SndRcvInfo) (int, error) {
return 0, ErrUnsupported
}
func (c *SCTPConn) SCTPRead(b []byte) (int, *SndRcvInfo, error) {
return 0, nil, ErrUnsupported
}
func (c *SCTPConn) Close() error {
return ErrUnsupported
}
func ListenSCTP(net string, laddr *SCTPAddr) (*SCTPListener, error) {
return nil, ErrUnsupported
}
func ListenSCTPExt(net string, laddr *SCTPAddr, options InitMsg) (*SCTPListener, error) {
return nil, ErrUnsupported
}
func (ln *SCTPListener) Accept() (net.Conn, error) {
return nil, ErrUnsupported
}
func (ln *SCTPListener) AcceptSCTP() (*SCTPConn, error) {
return nil, ErrUnsupported
}
func (ln *SCTPListener) Close() error {
return ErrUnsupported
}
func DialSCTP(net string, laddr, raddr *SCTPAddr) (*SCTPConn, error) {
return nil, ErrUnsupported
}
func DialSCTPExt(network string, laddr, raddr *SCTPAddr, options InitMsg) (*SCTPConn, error) {
return nil, ErrUnsupported
}<|fim▁end|>
|
package sctp
import (
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""passlib.ext.django.models -- monkeypatch django hashing framework"""
#=============================================================================
# imports
#=============================================================================
# core
import logging; log = logging.getLogger(__name__)
from warnings import warn
# site
from django import VERSION
from django.conf import settings
# pkg
from passlib.context import CryptContext
from passlib.exc import ExpectedTypeError
from passlib.ext.django.utils import _PatchManager, hasher_to_passlib_name, \
get_passlib_hasher, get_preset_config
from passlib.utils.compat import callable, unicode, bytes
# local
__all__ = ["password_context"]
#=============================================================================
# global attrs
#=============================================================================
# the context object which this patches contrib.auth to use for password hashing.
# configuration controlled by ``settings.PASSLIB_CONFIG``.
password_context = CryptContext()
# function mapping User objects -> passlib user category.
# may be overridden via ``settings.PASSLIB_GET_CATEGORY``.<|fim▁hole|> if user.is_superuser:
return "superuser"
elif user.is_staff:
return "staff"
else:
return None
# object used to track state of patches applied to django.
_manager = _PatchManager(log=logging.getLogger(__name__ + "._manager"))
# patch status
_patched = False
#=============================================================================
# applying & removing the patches
#=============================================================================
def _apply_patch():
"""monkeypatch django's password handling to use ``passlib_context``,
assumes the caller will configure the object.
"""
#
# setup constants
#
log.debug("preparing to monkeypatch 'django.contrib.auth' ...")
global _patched
assert not _patched, "monkeypatching already applied"
HASHERS_PATH = "django.contrib.auth.hashers"
MODELS_PATH = "django.contrib.auth.models"
USER_PATH = MODELS_PATH + ":User"
FORMS_PATH = "django.contrib.auth.forms"
#
# import UNUSUABLE_PASSWORD and is_password_usuable() helpers
# (providing stubs for older django versions)
#
if VERSION < (1,4):
has_hashers = False
if VERSION < (1,0):
UNUSABLE_PASSWORD = "!"
else:
from django.contrib.auth.models import UNUSABLE_PASSWORD
def is_password_usable(encoded):
return encoded is not None and encoded != UNUSABLE_PASSWORD
def is_valid_secret(secret):
return secret is not None
elif VERSION < (1,6):
has_hashers = True
from django.contrib.auth.hashers import UNUSABLE_PASSWORD, \
is_password_usable
# NOTE: 1.4 - 1.5 - empty passwords no longer valid.
def is_valid_secret(secret):
return bool(secret)
else:
has_hashers = True
from django.contrib.auth.hashers import is_password_usable
# 1.6 - empty passwords valid again
def is_valid_secret(secret):
return secret is not None
if VERSION < (1,6):
def make_unusable_password():
return UNUSABLE_PASSWORD
else:
from django.contrib.auth.hashers import make_password as _make_password
def make_unusable_password():
return _make_password(None)
# django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes
has_unsalted_sha1 = (VERSION >= (1,4,6))
#
# backport ``User.set_unusable_password()`` for Django 0.9
# (simplifies rest of the code)
#
if not hasattr(_manager.getorig(USER_PATH), "set_unusable_password"):
assert VERSION < (1,0)
@_manager.monkeypatch(USER_PATH)
def set_unusable_password(user):
user.password = make_unusable_password()
@_manager.monkeypatch(USER_PATH)
def has_usable_password(user):
return is_password_usable(user.password)
#
# patch ``User.set_password() & ``User.check_password()`` to use
# context & get_category (would just leave these as wrappers for hashers
# module under django 1.4, but then we couldn't pass User object into
# get_category very easily)
#
@_manager.monkeypatch(USER_PATH)
def set_password(user, password):
"passlib replacement for User.set_password()"
if is_valid_secret(password):
# NOTE: pulls _get_category from module globals
cat = _get_category(user)
user.password = password_context.encrypt(password, category=cat)
else:
user.set_unusable_password()
@_manager.monkeypatch(USER_PATH)
def check_password(user, password):
"passlib replacement for User.check_password()"
hash = user.password
if not is_valid_secret(password) or not is_password_usable(hash):
return False
if not hash and VERSION < (1,4):
return False
# NOTE: pulls _get_category from module globals
cat = _get_category(user)
ok, new_hash = password_context.verify_and_update(password, hash,
category=cat)
if ok and new_hash is not None:
# migrate to new hash if needed.
user.password = new_hash
user.save()
return ok
#
# override check_password() with our own implementation
#
@_manager.monkeypatch(HASHERS_PATH, enable=has_hashers)
@_manager.monkeypatch(MODELS_PATH)
def check_password(password, encoded, setter=None, preferred="default"):
"passlib replacement for check_password()"
# XXX: this currently ignores "preferred" keyword, since it's purpose
# was for hash migration, and that's handled by the context.
if not is_valid_secret(password) or not is_password_usable(encoded):
return False
ok = password_context.verify(password, encoded)
if ok and setter and password_context.needs_update(encoded):
setter(password)
return ok
#
# patch the other functions defined in the ``hashers`` module, as well
# as any other known locations where they're imported within ``contrib.auth``
#
if has_hashers:
@_manager.monkeypatch(HASHERS_PATH)
@_manager.monkeypatch(MODELS_PATH)
def make_password(password, salt=None, hasher="default"):
"passlib replacement for make_password()"
if not is_valid_secret(password):
return make_unusable_password()
if hasher == "default":
scheme = None
else:
scheme = hasher_to_passlib_name(hasher)
kwds = dict(scheme=scheme)
handler = password_context.handler(scheme)
# NOTE: django make specify an empty string for the salt,
# even if scheme doesn't accept a salt. we omit keyword
# in that case.
if salt is not None and (salt or 'salt' in handler.setting_kwds):
kwds['salt'] = salt
return password_context.encrypt(password, **kwds)
@_manager.monkeypatch(HASHERS_PATH)
@_manager.monkeypatch(FORMS_PATH)
def get_hasher(algorithm="default"):
"passlib replacement for get_hasher()"
if algorithm == "default":
scheme = None
else:
scheme = hasher_to_passlib_name(algorithm)
# NOTE: resolving scheme -> handler instead of
# passing scheme into get_passlib_hasher(),
# in case context contains custom handler
# shadowing name of a builtin handler.
handler = password_context.handler(scheme)
return get_passlib_hasher(handler, algorithm=algorithm)
# identify_hasher() was added in django 1.5,
# patching it anyways for 1.4, so passlib's version is always available.
@_manager.monkeypatch(HASHERS_PATH)
@_manager.monkeypatch(FORMS_PATH)
def identify_hasher(encoded):
"passlib helper to identify hasher from encoded password"
handler = password_context.identify(encoded, resolve=True,
required=True)
algorithm = None
if (has_unsalted_sha1 and handler.name == "django_salted_sha1" and
encoded.startswith("sha1$$")):
# django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes,
# but passlib just reuses the "sha1$salt$digest" handler.
# we want to resolve to correct django hasher.
algorithm = "unsalted_sha1"
return get_passlib_hasher(handler, algorithm=algorithm)
_patched = True
log.debug("... finished monkeypatching django")
def _remove_patch():
"""undo the django monkeypatching done by this module.
offered as a last resort if it's ever needed.
.. warning::
This may cause problems if any other Django modules have imported
their own copies of the patched functions, though the patched
code has been designed to throw an error as soon as possible in
this case.
"""
global _patched
if _patched:
log.debug("removing django monkeypatching...")
_manager.unpatch_all(unpatch_conflicts=True)
password_context.load({})
_patched = False
log.debug("...finished removing django monkeypatching")
return True
if _manager: # pragma: no cover -- sanity check
log.warning("reverting partial monkeypatching of django...")
_manager.unpatch_all()
password_context.load({})
log.debug("...finished removing django monkeypatching")
return True
log.debug("django not monkeypatched")
return False
#=============================================================================
# main code
#=============================================================================
def _load():
global _get_category
# TODO: would like to add support for inheriting config from a preset
# (or from existing hasher state) and letting PASSLIB_CONFIG
# be an update, not a replacement.
# TODO: wrap and import any custom hashers as passlib handlers,
# so they could be used in the passlib config.
# load config from settings
_UNSET = object()
config = getattr(settings, "PASSLIB_CONFIG", _UNSET)
if config is _UNSET:
# XXX: should probably deprecate this alias
config = getattr(settings, "PASSLIB_CONTEXT", _UNSET)
if config is _UNSET:
config = "passlib-default"
if config is None:
warn("setting PASSLIB_CONFIG=None is deprecated, "
"and support will be removed in Passlib 1.8, "
"use PASSLIB_CONFIG='disabled' instead.",
DeprecationWarning)
config = "disabled"
elif not isinstance(config, (unicode, bytes, dict)):
raise ExpectedTypeError(config, "str or dict", "PASSLIB_CONFIG")
# load custom category func (if any)
get_category = getattr(settings, "PASSLIB_GET_CATEGORY", None)
if get_category and not callable(get_category):
raise ExpectedTypeError(get_category, "callable", "PASSLIB_GET_CATEGORY")
# check if we've been disabled
if config == "disabled":
if _patched: # pragma: no cover -- sanity check
log.error("didn't expect monkeypatching would be applied!")
_remove_patch()
return
# resolve any preset aliases
if isinstance(config, str) and '\n' not in config:
config = get_preset_config(config)
# setup context
_apply_patch()
password_context.load(config)
if get_category:
# NOTE: _get_category is module global which is read by
# monkeypatched functions constructed by _apply_patch()
_get_category = get_category
log.debug("passlib.ext.django loaded")
# wrap load function so we can undo any patching if something goes wrong
try:
_load()
except:
_remove_patch()
raise
#=============================================================================
# eof
#=============================================================================<|fim▁end|>
|
def _get_category(user):
"""default get_category() implementation"""
|
<|file_name|>zephyr.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright (c) 2015, Roberto Riggio
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the CREATE-NET nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY CREATE-NET ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL CREATE-NET BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Basic Zephyr manager."""
from empower.core.app import EmpowerApp
from empower.core.app import DEFAULT_PERIOD
from empower.main import RUNTIME
from empower.datatypes.etheraddress import EtherAddress
from empower.core.resourcepool import ResourcePool
from empower.lvapp.lvappconnection import LVAPPConnection
import time, datetime, threading
import empower.apps.zephyr.zephyrLinker as linker
starttime = datetime.datetime.now()
class Zephyr(EmpowerApp):
"""Basic mobility manager.
Command Line Parameters:
tenant_id: tenant id
limit: handover limit in dBm (optional, default -80)
every: loop period in ms (optional, default 5000ms)
Example:
./empower-runtime.py apps.mobilitymanager.mobilitymanager \
--tenant_id=52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
def __init__(self, **kwargs):
self.__limit = linker.DEFAULT_RSSI_LIMIT
EmpowerApp.__init__(self, **kwargs)
# Register an wtp up event
self.wtpup(callback=self.wtp_up_callback)
# Register an lvap join event
self.lvapjoin(callback=self.lvap_join_callback)
# Register an lvap leave event
self.lvapleave(callback=self.lvap_leave_callback)
def lvap_leave_callback(self, lvap):
"""Called when an LVAP disassociates from a tennant."""
self.log.info("LVAP %s left %s" % (lvap.addr, lvap.ssid))
def wtp_up_callback(self, wtp):
"""Called when a new WTP connects to the controller."""
for block in wtp.supports:
self.ucqm(block=block, every=self.every)
def lvap_join_callback(self, lvap):
"""Called when an joins the network."""
self.rssi(lvap=lvap.addr, value=self.limit, relation='LT',
callback=self.low_rssi)
def handover(self, lvap):
""" Handover the LVAP to a WTP with
an RSSI higher that -65dB. """
self.log.info("Running handover...")
self.log.info("LVAP: %s - Limit RSSI : %u dB" % (lvap.addr, self.limit))
self.log.info("Initialize the Resource Pool")
pool = ResourcePool()
for wtp in self.wtps():
#for wtpd, lvaps in wtpdict.items():
#self.log.info("WTP in wtps : %s WTP in dict : %s are equal : %u\n" % (str(wtp.addr), wtpd, (wtp.addr == wtpd)))
templist = linker.wtpdict[str(wtp.addr)]
length = len(templist)
self.log.info("Pooling WTP: %s" % str(wtp.addr))
self.log.info(wtp.supports)
pool = pool | wtp.supports
self.log.info("Select matching Resource Blocks")
matches = pool #& lvap.scheduled_on
self.log.info(matches)
self.log.info("LVAP1 LOOP 107")
counter=0
for lvap in self.lvaps():
self.log.info("!!!!!!!!!!!!!!!%d : %s" % (counter, lvap.addr))
counter=counter+1
for block in matches:
self.log.info("Time : %f \n LVAP : %s \n addr : %s \n last_rssi_avg : %.2f \n last_rssi_std : %.2f \n last_packets : %u \n mov_rrsi : %.2f\n" % (time.time(),
lvap.addr,
block.ucqm[lvap.addr]['addr'],
block.ucqm[lvap.addr]['last_rssi_avg'],
block.ucqm[lvap.addr]['last_rssi_std'],
block.ucqm[lvap.addr]['last_packets'],
block.ucqm[lvap.addr]['mov_rssi']))
if (lvap.addr=="78:44:76:BF:DA:D4"):
self.log.info("LVAP: %s is leaving" % lvap.addr)
#del lvap.downlink[block] #deletes lvap
# Initialize the Resource Pool
pool = ResourcePool()
# Update the Resource Pool with all
# the available Resourse Blocks
for wtp in self.wtps():
if (str(wtp.addr) in linker.wtpdict):
if (len(linker.wtpdict[str(wtp.addr)]) < linker.wtpdict_limit[str(wtp.addr)]):
pool = pool | wtp.supports
# Select matching Resource Blocks
matches = pool & lvap.scheduled_on
# Filter Resource Blocks by RSSI
valid = [block for block in matches
if block.ucqm[lvap.addr]['mov_rssi'] >= self.limit]
#valid = self.blocks(lvap, self.limit)
if not valid:
self.log.info("not valid")
return
for block in valid:
self.log.info("valid LVAP: %s - Current RSSI : %u dB" % (lvap.addr, float(block.ucqm[lvap.addr]['mov_rssi'])))
new_block = max(valid, key=lambda x: x.ucqm[lvap.addr]['mov_rssi'])
self.log.info("LVAP %s setting new block %s" % (lvap.addr, new_block))
lvap.scheduled_on = new_block
@property
def limit(self):
"""Return loop period."""
return self.__limit
@limit.setter
def limit(self, value):
"""Set limit."""
limit = int(value)
if limit > 0 or limit < -100:
raise ValueError("Invalid value for limit")
self.log.info("Setting limit %u dB" % value)
self.__limit = limit
def set_limit(self, value):
"""Set limit."""
limit = int(value)
if limit > 0 or limit < -100:
raise ValueError("Invalid value for limit")
self.log.info("Setting limit %u dB" % value)
self.__limit = limit
def low_rssi(self, trigger):
""" Perform handover if an LVAP's rssi is
going below the threshold. """
self.log.info("Received trigger from %s rssi %u dB",
trigger.event['block'],
trigger.event['current'])
lvap = self.lvap(trigger.lvap)
if not lvap:
return
self.handover(lvap)
def wtp_clientlimit(self):
self.log.info("Running Client Limit...")
wtp_c=0
for wtp in self.wtps():
#Create lvaplist for the specific wtp
lvaplist = []
for lvap in self.lvaps():
if lvap.wtp.addr == wtp.addr:
#self.log.info("LVAP before list : %s" % lvap.addr)
lvaplist.append(str(lvap.addr))
#self.log.info("LVAP after list : %s" % lvaplist[-1])
#Check if limit is not given and provide the default
#if str(wtp.addr) not in linker.wtpdict_limit:
#linker.wtpdict_limit[str(wtp.addr)]=linker.DEFAULT_LVAP_NUMBER_LIMIT
#Check if wtp is not on the list and add it
if str(wtp.addr) not in linker.wtpdict:
linker.wtpdict[str(wtp.addr)] = lvaplist
#If limit is -1 then wtp has no limit
if linker.wtpdict_limit[str(wtp.addr)] == -1:
self.log.info("################ WTP : %s has unlimited LVAPs (limit %f) %s ######################\n" % (wtp, linker.wtpdict_limit[str(wtp.addr)], linker.wtpdict[str(wtp.addr)]))
continue
#If wtp client limit is exceeded, then handover the excess lvaps to new wtp
elif len(lvaplist) > linker.wtpdict_limit[str(wtp.addr)]:
self.log.info("################ WTP : %s has more LVAPs than the limit %f ######################\n" % (wtp, linker.wtpdict_limit[str(wtp.addr)]))
self.log.info(lvaplist)
self.log.info(linker.wtpdict[str(wtp.addr)])
diff = [a for a in lvaplist+linker.wtpdict[str(wtp.addr)] if (a not in lvaplist) or (a not in linker.wtpdict[str(wtp.addr)])]
self.log.info(diff)
numoflvaptohandover=len(lvaplist) - linker.wtpdict_limit[str(wtp.addr)]
self.log.info(numoflvaptohandover)
for lvap in self.lvaps():
#If lvap is the extra lvap in wtp then find wtp with best rssi and handover to that
if lvap.addr in diff or lvap.addr in lvaplist:
self.log.info("If lvap in diff")
# Initialize the Resource Pool
pool = ResourcePool()
# Update the Resource Pool with all
# the available Resourse Blocks
for other_wtp in self.wtps():
if other_wtp.addr != wtp.addr:
if linker.wtpdict_limit[str(other_wtp.addr)] < len(linker.wtpdict[str(other_wtp.addr)]):
self.log.info(linker.wtpdict_limit[str(other_wtp.addr)])
self.log.info(len(linker.wtpdict[str(other_wtp.addr)]))
pool = pool | other_wtp.supports
# Select matching Resource Blocks
matches = pool & lvap.scheduled_on
max_rssi = -float("inf")
first_block=1;
for block in matches:
if first_block == 1:
first_block=0
max_rssi=block.ucqm[lvap.addr]['mov_rssi']
else:
if max_rssi < block.ucqm[lvap.addr]['mov_rssi']:
max_rssi=block.ucqm[lvap.addr]['mov_rssi']
# Filter Resource Blocks by RSSI
valid = [block for block in matches
if block.ucqm[lvap.addr]['mov_rssi'] >= max_rssi]
if not valid:
self.log.info("not valid")
continue
for block in valid:
self.log.info("valid LVAP: %s - Current RSSI : %.2f dB" % (lvap.addr, float(block.ucqm[lvap.addr]['mov_rssi'])))
#Remove from lvaplist
lvaplist.remove(str(lvap.addr))
new_block = max(valid, key=lambda x: x.ucqm[lvap.addr]['mov_rssi'])
self.log.info("LVAP %s setting new block %s" % (lvap.addr, new_block))
lvap.scheduled_on = new_block
numoflvaptohandover=numoflvaptohandover-1
else:
continue
#if all lvaps have been handovered then break
if numoflvaptohandover == 0:
break
else:
self.log.info("################ WTP : %s has LVAPs' limit %f %s ######################\n" % (wtp, linker.wtpdict_limit[str(wtp.addr)], linker.wtpdict[str(wtp.addr)]))
#Update lvaplist for given wtp
linker.wtpdict[str(wtp.addr)] = lvaplist
for wtp, lvaps in linker.wtpdict.items():
temp = None
insert_comma = 0
for lvap in lvaps:
if insert_comma == 0:
temp = lvap
insert_comma=1
continue
temp = temp + ', ' + lvap #str(lvaps).strip('['']')#.strip('[EtherAddress'']')
self.log.info("WTP : %s has %u LVAPs : %s\n" % (wtp, len(lvaps), temp))
#self.wtp_lvap_limit(wtp,lvaps)
#if len(lvaps) > linker.DEFAULT_LVAP_NUMBER_LIMIT:
#self.log.info("################WTP : %s has more LVAPs than the limit######################\n" % wtp)
#for wtp in self.wtps()
def lvap_timelimit(self):
self.log.info("Running Time Limit...")
self.log.info("DEFAULT_LVAP_TIME_LIMIT : %d" % linker.DEFAULT_LVAP_TIME_LIMIT)
deletionlist = []
for lvap, endtime in linker.lvap_timer.items():
#self.log.info("LVAP")
formated_endtime = datetime.datetime.strptime(endtime, '%Y-%m-%d %H:%M:%S')
currenttime = datetime.datetime.now()
if (currenttime - formated_endtime).total_seconds() >= 0:
self.log.info("$$$$$$$$$$$$$ LVAP: %s Time ends" % lvap)
deletionlist.append(lvap)
else:
self.log.info("$$$$$$$$$$$$$ LVAP: %s Time continues" % lvap)
for dlvap in deletionlist:
self.log.info("$$$$$$$$$$$$$ Removing Timer LVAP: %s" % dlvap)
linker.removeLVAPTimer(self,dlvap)
for lvap in self.lvaps():
if str(lvap.addr) == dlvap:
lvaplabel=RUNTIME.get_label(lvap.addr)
self.log.info("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$")
#del lvap.downlink[lvap.block] #deletes lvap
#del RUNTIME.lvaps[lvap.addr]
for wtp in self.wtps():
if lvap.wtp.addr == wtp.addr:
#wtp.connection.send_del_lvap(lvap)
RUNTIME.remove_lvap(lvap.addr)
temp = linker.wtpdict[str(wtp.addr)]
temp.remove(str(lvap.addr))
#del RUNTIME.lvaps[lvap.addr]
break
#self.remove_lvap(lvap)
lvaplabel=RUNTIME.get_label(lvap.addr)
#self.log.info(lvaplabel)
self.log.info("Deleting LVAP %s from db" % lvaplabel)
self.log.info("Removing %s %s from allowed LVAPs" % (lvaplabel, lvap.addr))
RUNTIME.remove_allowed(lvap.addr)
self.log.info("Adding %s %s to denied LVAPs" % (lvaplabel, lvap.addr))
RUNTIME.add_denied(lvap.addr,lvaplabel)
self.log.info("LVAP %s deleted" % lvaplabel)
break
#pool = ResourcePool()
#for lvap in self.lvaps():
# matches = pool
# for block in matches:
# self.log.info("zephyr : LVAP: %s - Current RSSI : %f dB" % (lvap.addr, float(block.ucqm[lvap.addr]['mov_rssi'])))
def loop(self):
""" Periodic job. """
self.log.info("Periodic job.\n")
self.log.info("Allowed LVAPs: %s" % (RUNTIME.allowed))
self.log.info("Denied LVAPs: %s\n" % (RUNTIME.denied))
<|fim▁hole|> linker.wtpdict_limit[str(wtp.addr)]=linker.DEFAULT_LVAP_NUMBER_LIMIT
linker.initialize_limit = 0
self.log.info("Setting limit to default")
self.wtp_clientlimit()
self.lvap_timelimit()
self.log.info("Current limit %u linker limit to %u" % (self.limit,linker.RSSI_LIMIT))
if self.limit != linker.RSSI_LIMIT:
self.log.info("Current limit %u setting limit to %u" % (self.limit,linker.RSSI_LIMIT))
self.set_limit(linker.RSSI_LIMIT)
# Handover every active LVAP to
# the best WTP
counterlvap=0
for lvap in self.lvaps():
self.handover(lvap)
counterlvap=counterlvap+1
self.log.info("Active LVAPs: %u" % counterlvap)
def launch(tenant_id, limit=linker.DEFAULT_RSSI_LIMIT, every=DEFAULT_PERIOD):
""" Initialize the module. """
return Zephyr(tenant_id=tenant_id, limit=limit, every=every)<|fim▁end|>
|
if linker.initialize_limit == 1:
for wtp in self.wtps():
#Check if limit is not given and provide the default
if str(wtp.addr) not in linker.wtpdict_limit:
|
<|file_name|>config_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
DEBUG = True
TESTING = True
SECRET_KEY = 'SECRET_KEY'
DATABASE_URI = 'mysql+pymysql://root:[email protected]/git_webhook'
CELERY_BROKER_URL = 'redis://:@127.0.0.1:6379/0'
CELERY_RESULT_BACKEND = 'redis://:@127.0.0.1:6379/0'
<|fim▁hole|>GITHUB_CLIENT_ID = '123'
GITHUB_CLIENT_SECRET = 'SECRET'<|fim▁end|>
|
SOCKET_MESSAGE_QUEUE = 'redis://:@127.0.0.1:6379/0'
|
<|file_name|>bodyWriter.go<|end_file_name|><|fim▁begin|>package http1
import (
"errors"
"fmt"
"io"
"strconv"
"github.com/JOTPOT-UK/JOTPOT-Server/jps/pipe"
"github.com/JOTPOT-UK/JOTPOT-Server/util"
"github.com/JOTPOT-UK/JOTPOT-Server/http"
"github.com/JOTPOT-UK/JOTPOT-Server/jps"
"github.com/JOTPOT-UK/JOTPOT-Server/http/header"
)
var ErrBodyLengthExceded = errors.New("body length exceded")
var ErrNoBody = errors.New("no body")
// writerWrapper defers writing the serialized message header until the first
// body write (or an explicit Flush/Close), so the header can be generated as
// late as possible.
type writerWrapper struct {
	WriteFlusher util.WriteFlusher // underlying destination stream
	Closer       func() error      // called by Close once the header is out
	HeaderCB     func() []byte     // generates the serialized header bytes
	HeadWritten  bool              // true once the header has been written
}
// writeHeader generates the header via HeaderCB and writes it to the
// underlying stream, marking HeadWritten on success. A short write or an
// underlying error is wrapped with context and HeadWritten stays false.
func (ww *writerWrapper) writeHeader() error {
	hdr := ww.HeaderCB()
	written, err := ww.WriteFlusher.Write(hdr)
	switch {
	case err != nil:
		return fmt.Errorf("failed to write header: %w", err)
	case written != len(hdr):
		return fmt.Errorf("failed to write header: %w", io.ErrShortWrite)
	}
	ww.HeadWritten = true
	return nil
}
// Write sends src to the underlying stream, prepending the serialized header
// in the same Write call if it has not been written yet. The returned count
// covers only the src bytes, never the header bytes.
func (ww *writerWrapper) Write(src []byte) (int, error) {
	if !ww.HeadWritten {
		head := ww.HeaderCB()
		// Header and payload go out in a single underlying write.
		n, err := ww.WriteFlusher.Write(append(head, src...))
		// Convert the raw count into a payload-only count.
		n -= len(head)
		if n < 0 {
			// Not even the full header made it out; HeadWritten stays false so a
			// later call retries. NOTE(review): the negative n is returned as-is
			// to the caller — confirm callers tolerate that.
			if err == nil {
				err = fmt.Errorf("failed to write header: %w", io.ErrShortWrite)
			}
		} else {
			ww.HeadWritten = true
		}
		return n, err
	}
	return ww.WriteFlusher.Write(src)
}
// Flush makes sure the header has been written, then flushes the underlying
// stream. If writing the header fails, a best-effort flush still happens and
// the header error is returned.
func (ww *writerWrapper) Flush() error {
	if ww.HeadWritten {
		return ww.WriteFlusher.Flush()
	}
	if err := ww.writeHeader(); err != nil {
		// Flush whatever did get buffered; the header error takes priority.
		ww.WriteFlusher.Flush()
		return err
	}
	return ww.WriteFlusher.Flush()
}
// Close makes sure the header has been written (even for an empty body), then
// invokes the wrapped Closer. If the header write fails, the Closer still runs
// and the header error is returned.
func (ww *writerWrapper) Close() error {
	if ww.HeadWritten {
		return ww.Closer()
	}
	if err := ww.writeHeader(); err != nil {
		// Close regardless, but surface the header failure.
		ww.Closer()
		return err
	}
	return ww.Closer()
}
type nilBody struct {
util.CloseFlusher
}
<|fim▁hole|> return 0, ErrNoBody
}
// LimitWriteFlushCloser wraps a WriteFlushCloser and caps the total number of
// bytes that may be written at l; i is the running count written so far.
type LimitWriteFlushCloser struct {
	i, l int64
	w    jps.WriteFlushCloser
}

// Write forwards buf to the wrapped writer, truncating at the byte limit and
// returning ErrBodyLengthExceded once the limit is reached.
//
// NOTE(review): the receiver is a value, so `w.i += ...` mutates a copy and
// the running total is lost between calls — the limit is only enforced within
// a single Write. This looks like it should be a pointer receiver (which also
// requires constructing the wrapper by address in BodyWriter.Body); confirm
// before changing, since the value type currently satisfies the interface.
func (w LimitWriteFlushCloser) Write(buf []byte) (int, error) {
	w.i += int64(len(buf))
	if w.i > w.l {
		// overflow = number of bytes in buf that do not fit under the limit.
		overflow := len(buf) - int(w.i-w.l)
		w.i = w.l
		toWrite := len(buf) - overflow
		if toWrite == 0 {
			return 0, ErrBodyLengthExceded
		}
		n, err := w.w.Write(buf[:toWrite])
		// Report the limit violation unless the partial write itself failed.
		if n == toWrite || err == nil {
			err = ErrBodyLengthExceded
		}
		return n, err
	}
	return w.w.Write(buf)
}

// Close closes the wrapped writer.
func (w LimitWriteFlushCloser) Close() error {
	return w.w.Close()
}

// Flush flushes the wrapped writer.
func (w LimitWriteFlushCloser) Flush() error {
	return w.w.Flush()
}
// CloseCBWriteFlushCloser guards a WriteFlushCloser with a closed flag and
// delegates Close to a callback; once closed, every operation reports
// io.ErrClosedPipe.
type CloseCBWriteFlushCloser struct {
	closed bool                 // set when the close callback reports closure
	close  func() (error, bool) // returns (error, whether now closed)
	passon jps.WriteFlushCloser // the writer being guarded
}

// Write forwards to the wrapped writer unless already closed.
func (w *CloseCBWriteFlushCloser) Write(src []byte) (int, error) {
	if w.closed {
		return 0, io.ErrClosedPipe
	}
	return w.passon.Write(src)
}

// Flush forwards to the wrapped writer unless already closed.
func (w *CloseCBWriteFlushCloser) Flush() error {
	if w.closed {
		return io.ErrClosedPipe
	}
	return w.passon.Flush()
}

// Close invokes the close callback; the callback decides both the returned
// error and whether the writer is now considered closed. The wrapped writer
// itself is deliberately NOT closed here (see TODO below).
func (w *CloseCBWriteFlushCloser) Close() (err error) {
	if w.closed {
		return io.ErrClosedPipe
	}
	//TODO: Fix properly!!!
	/*if err = w.passon.Close(); err != nil {
		return
	}*/
	err, w.closed = w.close()
	return
}
// BodyWriter lazily constructs the writer pipeline for an HTTP/1 message
// body: headers are serialized on first write, and the body stream is either
// length-limited (Content-Length), transfer-encoded (e.g. chunked), or a
// write-rejecting stub when the message has no body.
type BodyWriter struct {
	ses         jps.Session         // owning session
	config      *http.Config        // supplies transfer-encoding pipe generators
	header      *header.Header      // headers of the outgoing message
	req         *http.Request       // associated request (nil-able); used for HEAD detection
	hasBody     func() bool         // whether this message may carry a body
	finalWriter writerWrapper       // raw stream + lazy header writer
	writer      jps.WriteFlushCloser // memoized body writer built by Body()
}
// NewBodyWriter assembles a BodyWriter over rawWriter. headerGenerator is
// called lazily to serialize the headers just before the first body bytes go
// out, and close is invoked when the body writer is closed.
func NewBodyWriter(
	ses jps.Session,
	config *http.Config,
	header *header.Header,
	req *http.Request,
	hasBody func() bool,
	rawWriter jps.WriteFlushCloser,
	headerGenerator func() []byte,
	close func() error,
) BodyWriter {
	wrapped := writerWrapper{
		WriteFlusher: rawWriter,
		Closer:       close,
		HeaderCB:     headerGenerator,
	}
	return BodyWriter{
		ses:         ses,
		config:      config,
		header:      header,
		req:         req,
		hasBody:     hasBody,
		finalWriter: wrapped,
	}
}
// Session returns the session this body writer belongs to.
func (w *BodyWriter) Session() jps.Session {
	return w.ses
}
// BodyLength determines the declared body length from the headers.
//
// Returns:
//   n >= 0 — the single Content-Length value (0 for HEAD responses);
//   -1     — no Content-Length header, so the length is unknown;
//   -2     — malformed or conflicting headers, with a non-nil error.
func (w *BodyWriter) BodyLength() (int64, error) {
	// Responses to HEAD never carry a body regardless of headers.
	if w.req != nil && w.req.MethodStr == "HEAD" {
		return 0, nil
	}
	lens := w.header.GetValues("Content-Length")
	if len(lens) == 0 {
		return -1, nil
	} else if len(lens) == 1 {
		// bitSize 63 keeps the parsed value representable as a non-negative int64.
		l, err := strconv.ParseUint(lens[0], 10, 63)
		if err != nil {
			return -2, http.MakeErrMalformedContentLength(err)
		}
		return int64(l), nil
	} else {
		// Multiple Content-Length headers are ambiguous; reject.
		return -2, http.ErrTooManyContentLength
	}
}
// SetBodyLength declares the length of the outgoing body in the headers.
//
// length >= 0: sets Content-Length and removes a trailing "chunked" coding
// from Transfer-Encoding (a known length does not need chunking).
//
// length == -1: the length is unknown, so Content-Length is removed and
// "chunked" is appended to Transfer-Encoding unless it is already the final
// coding (chunked must be applied last, per RFC 7230 §3.3.1).
//
// Any other negative length is a programming error and panics.
func (w *BodyWriter) SetBodyLength(length int64) error {
	if length >= 0 {
		w.header.Set("Content-Length", strconv.FormatUint(uint64(length), 10))
		codes := w.header.GetValues("Transfer-Encoding")
		last := len(codes) - 1
		if last > -1 && codes[last] == "chunked" {
			w.header.SetValues("Transfer-Encoding", codes[:last])
		}
	} else if length == -1 {
		w.header.Del("Content-Length")
		codes := w.header.GetValues("Transfer-Encoding")
		last := len(codes) - 1
		if last < 0 || codes[last] != "chunked" {
			// Fixed: header name was misspelled "Transfer-Encodings", so the
			// chunked coding was added under a bogus header and never took
			// effect for the real Transfer-Encoding read above.
			w.header.Add("Transfer-Encoding", "chunked")
		}
	} else {
		panic("body length cannot be less than -1")
	}
	return nil
}
// Body returns the writer for the message body, constructing and memoizing it
// on first call:
//   - no body          → a nilBody stub that rejects writes;
//   - known length     → a LimitWriteFlushCloser capping at Content-Length;
//   - unknown length   → a transfer-encoding pipe (e.g. chunked) built from
//                        the configured encodings.
func (w *BodyWriter) Body() (jps.WriteFlushCloser, error) {
	if w.writer == nil {
		if w.hasBody() {
			//TIMER:start := time.Now()
			l, err := w.BodyLength()
			if err != nil {
				return nil, err
			}
			//TIMER:jps.HBLengthTimes = append(jps.HBLengthTimes, time.Since(start))
			//TIMER:start = time.Now()
			if l == -1 {
				codes := w.header.GetValuesRawKey("Transfer-Encoding")
				lm1 := len(codes) - 1
				// "chunked" is only legal as the final transfer coding.
				for i := 0; i < lm1; i++ {
					if codes[i] == "chunked" {
						return nil, http.ErrMustChunkLast
					}
				}
				//TODO: Close connection if chunked is not last
				pipes, ok := w.config.TransferEncodings.GetWriterPipeGenerators(codes)
				if !ok {
					return nil, http.ErrUnsupportedTransferEncoding
				}
				if w.writer, err = pipe.To(&w.finalWriter, pipes); err != nil {
					return w.writer, err
				}
			} else {
				//TIMER:jps.HBHeadWriteTimes = append(jps.HBHeadWriteTimes, time.Since(start))
				w.writer = LimitWriteFlushCloser{
					l: l,
					w: &w.finalWriter,
				}
			}
		} else {
			w.writer = nilBody{&w.finalWriter}
		}
	}
	return w.writer, nil
}
|
func (_ nilBody) Write(_ []byte) (int, error) {
|
<|file_name|>power.cpp<|end_file_name|><|fim▁begin|>//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************
#include "ngraph/op/power.hpp"
#include "ngraph/op/divide.hpp"
#include "ngraph/op/log.hpp"
#include "ngraph/op/multiply.hpp"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/runtime/reference/power.hpp"
using namespace std;
using namespace ngraph;
namespace
{
// Typed kernel: runs the reference elementwise power implementation for
// element type ET over the two input tensors, writing the result into `out`
// with the given broadcast specification. Always reports success.
template <element::Type_t ET>
bool evaluate(const HostTensorPtr& arg0,
              const HostTensorPtr& arg1,
              const HostTensorPtr& out,
              const op::AutoBroadcastSpec& broadcast_spec)
{
    runtime::reference::power(arg0->get_data_ptr<ET>(),
                              arg1->get_data_ptr<ET>(),
                              out->get_data_ptr<ET>(),
                              arg0->get_shape(),
                              arg1->get_shape(),
                              broadcast_spec);
    return true;
}
// Dispatches the power evaluation to the typed kernel matching arg0's
// element type after shaping `out` for the broadcast. Returns false for
// unsupported element types.
//
// Fixed: the function body had been garbled by extraction — the opening
// brace and the `bool rc = true;` initializer were displaced onto the
// signature line, leaving the function syntactically broken. Reconstructed
// from the surrounding TYPE_CASE dispatch structure.
bool evaluate_power(const HostTensorPtr& arg0,
                    const HostTensorPtr& arg1,
                    const HostTensorPtr& out,
                    const op::AutoBroadcastSpec& broadcast_spec)
{
    bool rc = true;
    out->set_broadcast(broadcast_spec, arg0, arg1);
    switch (arg0->get_element_type())
    {
        TYPE_CASE(i8)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(i16)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(i32)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(i64)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(u8)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(u16)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(u32)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(u64)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(f32)(arg0, arg1, out, broadcast_spec);
        break;
        TYPE_CASE(f64)(arg0, arg1, out, broadcast_spec);
        break;
    default: rc = false; break;
    }
    return rc;
}
}
constexpr NodeTypeInfo op::v1::Power::type_info;
/// \brief Constructs an elementwise exponentiation op: out = arg0 ^ arg1,
///        with `auto_broadcast` controlling implicit shape broadcasting.
op::v1::Power::Power(const Output<Node>& arg0,
                     const Output<Node>& arg1,
                     const AutoBroadcastSpec& auto_broadcast)
    : BinaryElementwiseArithmetic(arg0, arg1, auto_broadcast)
{
    constructor_validate_and_infer_types();
}
/// \brief Makes a copy of this Power node rewired to the given inputs,
///        preserving the autobroadcast policy.
shared_ptr<Node> op::v1::Power::clone_with_new_inputs(const OutputVector& new_args) const
{
    check_new_args_count(this, new_args);
    const auto& base = new_args.at(0);
    const auto& exponent = new_args.at(1);
    return make_shared<op::v1::Power>(base, exponent, get_autob());
}
// Registers backprop contributions for z = x^y:
//   dz/dx = delta * y * x^(y-1) = delta * y * z / x
//   dz/dy = delta * z * ln(x)
// Autodiff with auto-broadcast is only allowed when the (static) input
// shapes already match, since broadcasting gradients is not implemented.
void op::v1::Power::generate_adjoints(autodiff::Adjoints& adjoints, const OutputVector& deltas)
{
    bool static_shapes =
        input(0).get_partial_shape().is_static() && input(1).get_partial_shape().is_static();
    if (static_shapes && input(0).get_shape() == input(1).get_shape())
    {
        // It does not matter if broadcast is enabled since shapes match
    }
    else if (get_autob().m_type != op::AutoBroadcastType::NONE)
    {
        throw ngraph_error("Autodiff not supported with auto broadcasting");
    }
    auto delta = deltas.at(0);
    auto x = input_value(0);
    auto y = input_value(1);
    auto log_x = make_shared<op::v0::Log>(x);
    // shared_from_this() is this node's output z = x^y, reused in both terms.
    adjoints.add_delta(x, delta * y * shared_from_this() / x);
    adjoints.add_delta(y, delta * shared_from_this() * log_x);
}
// Host-side evaluation entry point: delegates to the type-dispatching
// evaluate_power helper using this node's autobroadcast policy.
bool op::v1::Power::evaluate(const HostTensorVector& outputs, const HostTensorVector& inputs) const
{
    return evaluate_power(inputs[0], inputs[1], outputs[0], get_autob());
}
|
{
|
<|file_name|>in_tree.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* This file defines various in-tree volume test drivers for TestSuites.
*
* There are two ways, how to prepare test drivers:
* 1) With containerized server (NFS, Ceph, Gluster, iSCSI, ...)
* It creates a server pod which defines one volume for the tests.
* These tests work only when privileged containers are allowed, exporting
* various filesystems (NFS, GlusterFS, ...) usually needs some mounting or
* other privileged magic in the server pod.
*
* Note that the server containers are for testing purposes only and should not
* be used in production.
*
* 2) With server or cloud provider outside of Kubernetes (Cinder, GCE, AWS, Azure, ...)
* Appropriate server or cloud provider must exist somewhere outside
* the tested Kubernetes cluster. CreateVolume will create a new volume to be
* used in the TestSuites for inlineVolume or DynamicPV tests.
*/
package drivers
import (
"context"
"fmt"
"os/exec"
"strconv"
"strings"
"time"
"github.com/onsi/ginkgo"
v1 "k8s.io/api/core/v1"
rbacv1 "k8s.io/api/rbac/v1"
storagev1 "k8s.io/api/storage/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apiserver/pkg/authentication/serviceaccount"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/kubernetes/test/e2e/framework"
e2eauth "k8s.io/kubernetes/test/e2e/framework/auth"
e2enode "k8s.io/kubernetes/test/e2e/framework/node"
e2epod "k8s.io/kubernetes/test/e2e/framework/pod"
e2epv "k8s.io/kubernetes/test/e2e/framework/pv"
e2eskipper "k8s.io/kubernetes/test/e2e/framework/skipper"
e2evolume "k8s.io/kubernetes/test/e2e/framework/volume"
storageframework "k8s.io/kubernetes/test/e2e/storage/framework"
"k8s.io/kubernetes/test/e2e/storage/utils"
vspheretest "k8s.io/kubernetes/test/e2e/storage/vsphere"
imageutils "k8s.io/kubernetes/test/utils/image"
)
const (
// Template for iSCSI IQN.
iSCSIIQNTemplate = "iqn.2003-01.io.k8s:e2e.%s"
)
// NFS
// nfsDriver exercises the in-tree NFS volume plugin. Inline/pre-provisioned
// volumes are served by a containerized NFS server pod; dynamic provisioning
// uses an external provisioner pod started in PrepareTest.
type nfsDriver struct {
	externalProvisionerPod *v1.Pod // provisioner pod for DynamicPV tests
	externalPluginName     string  // per-namespace provisioner plugin name
	driverInfo storageframework.DriverInfo
}
// nfsVolume tracks a server pod created for one test volume.
type nfsVolume struct {
	serverHost string
	serverPod  *v1.Pod
	f          *framework.Framework
}
// Compile-time checks that nfsDriver implements the expected interfaces.
var _ storageframework.TestDriver = &nfsDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &nfsDriver{}
var _ storageframework.InlineVolumeTestDriver = &nfsDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &nfsDriver{}
var _ storageframework.DynamicPVTestDriver = &nfsDriver{}
// InitNFSDriver returns nfsDriver that implements TestDriver interface
func InitNFSDriver() storageframework.TestDriver {
	return &nfsDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "nfs",
			InTreePluginName: "kubernetes.io/nfs",
			MaxFileSize:      storageframework.FileSizeLarge,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			SupportedMountOption: sets.NewString("proto=tcp", "relatime"),
			RequiredMountOption:  sets.NewString("vers=4.1"),
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapExec:        true,
				storageframework.CapRWX:         true,
				storageframework.CapMultiPODs:   true,
			},
		},
	}
}
// GetDriverInfo returns this driver's static capability description.
func (n *nfsDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &n.driverInfo
}
// SkipUnsupportedTest is a no-op: NFS supports all generated patterns.
func (n *nfsDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// GetVolumeSource builds an inline NFS volume source pointing at the test
// server pod's export root.
func (n *nfsDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	nv, ok := e2evolume.(*nfsVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to NFS test volume")
	return &v1.VolumeSource{
		NFS: &v1.NFSVolumeSource{
			Server:   nv.serverHost,
			Path:     "/",
			ReadOnly: readOnly,
		},
	}
}
// GetPersistentVolumeSource builds a PV source for the test server; NFS has
// no node affinity, so the second return is always nil.
func (n *nfsDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	nv, ok := e2evolume.(*nfsVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to NFS test volume")
	return &v1.PersistentVolumeSource{
		NFS: &v1.NFSVolumeSource{
			Server:   nv.serverHost,
			Path:     "/",
			ReadOnly: readOnly,
		},
	}, nil
}
// GetDynamicProvisionStorageClass returns a StorageClass wired to the
// external provisioner registered in PrepareTest.
func (n *nfsDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	provisioner := n.externalPluginName
	parameters := map[string]string{"mountOptions": "vers=4.1"}
	ns := config.Framework.Namespace.Name
	return storageframework.GetStorageClass(provisioner, parameters, nil, ns)
}
// PrepareTest grants the default service account cluster-admin (needed by the
// external provisioner), starts the provisioner pod, and returns a cleanup
// func that deletes both again.
func (n *nfsDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	cs := f.ClientSet
	ns := f.Namespace
	n.externalPluginName = fmt.Sprintf("example.com/nfs-%s", ns.Name)
	// TODO(mkimuram): cluster-admin gives too much right but system:persistent-volume-provisioner
	// is not enough. We should create new clusterrole for testing.
	err := e2eauth.BindClusterRole(cs.RbacV1(), "cluster-admin", ns.Name,
		rbacv1.Subject{Kind: rbacv1.ServiceAccountKind, Namespace: ns.Name, Name: "default"})
	framework.ExpectNoError(err)
	err = e2eauth.WaitForAuthorizationUpdate(cs.AuthorizationV1(),
		serviceaccount.MakeUsername(ns.Name, "default"),
		"", "get", schema.GroupResource{Group: "storage.k8s.io", Resource: "storageclasses"}, true)
	framework.ExpectNoError(err, "Failed to update authorization: %v", err)
	ginkgo.By("creating an external dynamic provisioner pod")
	n.externalProvisionerPod = utils.StartExternalProvisioner(cs, ns.Name, n.externalPluginName)
	return &storageframework.PerTestConfig{
		Driver:    n,
		Prefix:    "nfs",
		Framework: f,
	}, func() {
		framework.ExpectNoError(e2epod.DeletePodWithWait(cs, n.externalProvisionerPod))
		clusterRoleBindingName := ns.Name + "--" + "cluster-admin"
		// NOTE(review): the Delete error is ignored here — confirm best-effort
		// cleanup is intended.
		cs.RbacV1().ClusterRoleBindings().Delete(context.TODO(), clusterRoleBindingName, *metav1.NewDeleteOptions(0))
	}
}
// CreateVolume spins up an NFS server pod for inline/pre-provisioned tests;
// DynamicPV volumes are created by the provisioner, so nothing is done.
func (n *nfsDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	ns := f.Namespace
	// NewNFSServer creates a pod for InlineVolume and PreprovisionedPV,
	// and startExternalProvisioner creates a pod for DynamicPV.
	// Therefore, we need a different PrepareTest logic for volType.
	switch volType {
	case storageframework.InlineVolume:
		fallthrough
	case storageframework.PreprovisionedPV:
		c, serverPod, serverHost := e2evolume.NewNFSServer(cs, ns.Name, []string{})
		config.ServerConfig = &c
		return &nfsVolume{
			serverHost: serverHost,
			serverPod:  serverPod,
			f:          f,
		}
	case storageframework.DynamicPV:
		// Do nothing
	default:
		framework.Failf("Unsupported volType:%v is specified", volType)
	}
	return nil
}
// DeleteVolume tears down the NFS server pod.
func (v *nfsVolume) DeleteVolume() {
	cleanUpVolumeServer(v.f, v.serverPod)
}
// Gluster
// glusterFSDriver exercises the in-tree GlusterFS plugin against a
// containerized Gluster server pod.
type glusterFSDriver struct {
	driverInfo storageframework.DriverInfo
}
// glusterVolume tracks the server pod plus the prefix used to derive the
// endpoints/service name.
type glusterVolume struct {
	prefix    string
	serverPod *v1.Pod
	f         *framework.Framework
}
// Compile-time interface checks.
var _ storageframework.TestDriver = &glusterFSDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &glusterFSDriver{}
var _ storageframework.InlineVolumeTestDriver = &glusterFSDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &glusterFSDriver{}
// InitGlusterFSDriver returns glusterFSDriver that implements TestDriver interface
func InitGlusterFSDriver() storageframework.TestDriver {
	return &glusterFSDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "gluster",
			InTreePluginName: "kubernetes.io/glusterfs",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapExec:        true,
				storageframework.CapRWX:         true,
				storageframework.CapMultiPODs:   true,
			},
		},
	}
}
// GetDriverInfo returns this driver's static capability description.
func (g *glusterFSDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &g.driverInfo
}
// SkipUnsupportedTest restricts Gluster tests to node OS distros that can
// run the gluster client.
func (g *glusterFSDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessNodeOSDistroIs("gci", "ubuntu", "custom")
}
// GetVolumeSource builds an inline Glusterfs volume source referencing the
// "<prefix>-server" endpoints object.
func (g *glusterFSDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	gv, ok := e2evolume.(*glusterVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Gluster test volume")
	name := gv.prefix + "-server"
	return &v1.VolumeSource{
		Glusterfs: &v1.GlusterfsVolumeSource{
			EndpointsName: name,
			// 'test_vol' comes from test/images/volumes-tester/gluster/run_gluster.sh
			Path:     "test_vol",
			ReadOnly: readOnly,
		},
	}
}
// GetPersistentVolumeSource mirrors GetVolumeSource for PVs; Gluster has no
// node affinity, so the second return is nil.
func (g *glusterFSDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	gv, ok := e2evolume.(*glusterVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Gluster test volume")
	name := gv.prefix + "-server"
	return &v1.PersistentVolumeSource{
		Glusterfs: &v1.GlusterfsPersistentVolumeSource{
			EndpointsName: name,
			// 'test_vol' comes from test/images/volumes-tester/gluster/run_gluster.sh
			Path:     "test_vol",
			ReadOnly: readOnly,
		},
	}, nil
}
// PrepareTest needs no per-test setup; cleanup is a no-op.
func (g *glusterFSDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    g,
		Prefix:    "gluster",
		Framework: f,
	}, func() {}
}
// CreateVolume starts a Gluster server pod and records its config.
func (g *glusterFSDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	ns := f.Namespace
	c, serverPod, _ := e2evolume.NewGlusterfsServer(cs, ns.Name)
	config.ServerConfig = &c
	return &glusterVolume{
		prefix:    config.Prefix,
		serverPod: serverPod,
		f:         f,
	}
}
// DeleteVolume removes the endpoints object, the service, and the server pod
// created for this volume, tolerating already-deleted resources.
func (v *glusterVolume) DeleteVolume() {
	f := v.f
	cs := f.ClientSet
	ns := f.Namespace
	name := v.prefix + "-server"
	nameSpaceName := fmt.Sprintf("%s/%s", ns.Name, name)
	framework.Logf("Deleting Gluster endpoints %s...", nameSpaceName)
	err := cs.CoreV1().Endpoints(ns.Name).Delete(context.TODO(), name, metav1.DeleteOptions{})
	if err != nil {
		if !apierrors.IsNotFound(err) {
			framework.Failf("Gluster deleting endpoint %s failed: %v", nameSpaceName, err)
		}
		framework.Logf("Gluster endpoints %q not found, assuming deleted", nameSpaceName)
	}
	framework.Logf("Deleting Gluster service %s...", nameSpaceName)
	err = cs.CoreV1().Services(ns.Name).Delete(context.TODO(), name, metav1.DeleteOptions{})
	if err != nil {
		if !apierrors.IsNotFound(err) {
			framework.Failf("Gluster deleting service %s failed: %v", nameSpaceName, err)
		}
		framework.Logf("Gluster service %q not found, assuming deleted", nameSpaceName)
	}
	framework.Logf("Deleting Gluster server pod %q...", v.serverPod.Name)
	err = e2epod.DeletePodWithWait(cs, v.serverPod)
	if err != nil {
		framework.Failf("Gluster server pod delete failed: %v", err)
	}
}
// iSCSI
// The iscsiadm utility and iscsi target kernel modules must be installed on all nodes.
type iSCSIDriver struct {
	driverInfo storageframework.DriverInfo
}
// iSCSIVolume tracks the target server pod and the IQN it exports.
type iSCSIVolume struct {
	serverPod *v1.Pod
	serverIP  string
	f         *framework.Framework
	iqn       string
}
// Compile-time interface checks.
var _ storageframework.TestDriver = &iSCSIDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &iSCSIDriver{}
var _ storageframework.InlineVolumeTestDriver = &iSCSIDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &iSCSIDriver{}
// InitISCSIDriver returns iSCSIDriver that implements TestDriver interface
func InitISCSIDriver() storageframework.TestDriver {
	return &iSCSIDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "iscsi",
			InTreePluginName: "kubernetes.io/iscsi",
			FeatureTag:       "[Feature:Volumes]",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedFsType: sets.NewString(
				"", // Default fsType
				"ext4",
			),
			TopologyKeys: []string{v1.LabelHostname},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapFsGroup:     true,
				storageframework.CapBlock:       true,
				storageframework.CapExec:        true,
				storageframework.CapMultiPODs:   true,
				storageframework.CapTopology:    true,
			},
		},
	}
}
// GetDriverInfo returns this driver's static capability description.
func (i *iSCSIDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &i.driverInfo
}
// SkipUnsupportedTest is a no-op for iSCSI.
func (i *iSCSIDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// GetVolumeSource builds an inline iSCSI source. The target portal is
// localhost because the client is pinned to the server's node (see
// newISCSIServer).
func (i *iSCSIDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	iv, ok := e2evolume.(*iSCSIVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to iSCSI test volume")
	volSource := v1.VolumeSource{
		ISCSI: &v1.ISCSIVolumeSource{
			TargetPortal: "127.0.0.1:3260",
			IQN:          iv.iqn,
			Lun:          0,
			ReadOnly:     readOnly,
		},
	}
	if fsType != "" {
		volSource.ISCSI.FSType = fsType
	}
	return &volSource
}
// GetPersistentVolumeSource mirrors GetVolumeSource for PVs.
func (i *iSCSIDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	iv, ok := e2evolume.(*iSCSIVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to iSCSI test volume")
	pvSource := v1.PersistentVolumeSource{
		ISCSI: &v1.ISCSIPersistentVolumeSource{
			TargetPortal: "127.0.0.1:3260",
			IQN:          iv.iqn,
			Lun:          0,
			ReadOnly:     readOnly,
		},
	}
	if fsType != "" {
		pvSource.ISCSI.FSType = fsType
	}
	return &pvSource, nil
}
// PrepareTest needs no per-test setup; cleanup is a no-op.
func (i *iSCSIDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    i,
		Prefix:    "iscsi",
		Framework: f,
	}, func() {}
}
// CreateVolume starts an iSCSI target pod and pins clients to its node.
func (i *iSCSIDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	ns := f.Namespace
	c, serverPod, serverIP, iqn := newISCSIServer(cs, ns.Name)
	config.ServerConfig = &c
	config.ClientNodeSelection = c.ClientNodeSelection
	return &iSCSIVolume{
		serverPod: serverPod,
		serverIP:  serverIP,
		iqn:       iqn,
		f:         f,
	}
}
// newISCSIServer is an iSCSI-specific wrapper for CreateStorageServer.
func newISCSIServer(cs clientset.Interface, namespace string) (config e2evolume.TestConfig, pod *v1.Pod, ip, iqn string) {
	// Generate cluster-wide unique IQN
	iqn = fmt.Sprintf(iSCSIIQNTemplate, namespace)
	config = e2evolume.TestConfig{
		Namespace:   namespace,
		Prefix:      "iscsi",
		ServerImage: imageutils.GetE2EImage(imageutils.VolumeISCSIServer),
		ServerArgs:  []string{iqn},
		ServerVolumes: map[string]string{
			// iSCSI container needs to insert modules from the host
			"/lib/modules": "/lib/modules",
			// iSCSI container needs to configure kernel
			"/sys/kernel": "/sys/kernel",
			// iSCSI source "block devices" must be available on the host
			"/srv/iscsi": "/srv/iscsi",
		},
		ServerReadyMessage: "iscsi target started",
		ServerHostNetwork:  true,
	}
	pod, ip = e2evolume.CreateStorageServer(cs, config)
	// Make sure the client runs on the same node as server so we don't need to open any firewalls.
	config.ClientNodeSelection = e2epod.NodeSelection{Name: pod.Spec.NodeName}
	return config, pod, ip, iqn
}
// newRBDServer is a CephRBD-specific wrapper for CreateStorageServer.
func newRBDServer(cs clientset.Interface, namespace string) (config e2evolume.TestConfig, pod *v1.Pod, secret *v1.Secret, ip string) {
	config = e2evolume.TestConfig{
		Namespace:   namespace,
		Prefix:      "rbd",
		ServerImage: imageutils.GetE2EImage(imageutils.VolumeRBDServer),
		ServerPorts: []int{6789},
		ServerVolumes: map[string]string{
			"/lib/modules": "/lib/modules",
		},
		ServerReadyMessage: "Ceph is ready",
	}
	pod, ip = e2evolume.CreateStorageServer(cs, config)
	// create secrets for the server
	secret = &v1.Secret{
		TypeMeta: metav1.TypeMeta{
			Kind:       "Secret",
			APIVersion: "v1",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name: config.Prefix + "-secret",
		},
		Data: map[string][]byte{
			// from test/images/volumes-tester/rbd/keyring
			"key": []byte("AQDRrKNVbEevChAAEmRC+pW/KBVHxa0w/POILA=="),
		},
		Type: "kubernetes.io/rbd",
	}
	secret, err := cs.CoreV1().Secrets(config.Namespace).Create(context.TODO(), secret, metav1.CreateOptions{})
	if err != nil {
		framework.Failf("Failed to create secrets for Ceph RBD: %v", err)
	}
	return config, pod, secret, ip
}
// DeleteVolume tears down the iSCSI target pod.
func (v *iSCSIVolume) DeleteVolume() {
	cleanUpVolumeServer(v.f, v.serverPod)
}
// Ceph RBD
// rbdDriver exercises the in-tree Ceph RBD plugin against a containerized
// Ceph server pod (see newRBDServer).
type rbdDriver struct {
	driverInfo storageframework.DriverInfo
}
// rbdVolume tracks the server pod, its IP, and the auth secret.
type rbdVolume struct {
	serverPod *v1.Pod
	serverIP  string
	secret    *v1.Secret
	f         *framework.Framework
}
// Compile-time interface checks.
var _ storageframework.TestDriver = &rbdDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &rbdDriver{}
var _ storageframework.InlineVolumeTestDriver = &rbdDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &rbdDriver{}
// InitRbdDriver returns rbdDriver that implements TestDriver interface
func InitRbdDriver() storageframework.TestDriver {
	return &rbdDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "rbd",
			InTreePluginName: "kubernetes.io/rbd",
			FeatureTag:       "[Feature:Volumes][Serial]",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
				"ext4",
			),
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapFsGroup:     true,
				storageframework.CapBlock:       true,
				storageframework.CapExec:        true,
				storageframework.CapMultiPODs:   true,
			},
		},
	}
}
// GetDriverInfo returns this driver's static capability description.
func (r *rbdDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &r.driverInfo
}
// SkipUnsupportedTest is a no-op for RBD.
func (r *rbdDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// GetVolumeSource builds an inline RBD source; the image name "foo" comes
// from the test server image setup.
func (r *rbdDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	rv, ok := e2evolume.(*rbdVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to RBD test volume")
	volSource := v1.VolumeSource{
		RBD: &v1.RBDVolumeSource{
			CephMonitors: []string{rv.serverIP},
			RBDPool:      "rbd",
			RBDImage:     "foo",
			RadosUser:    "admin",
			SecretRef: &v1.LocalObjectReference{
				Name: rv.secret.Name,
			},
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		volSource.RBD.FSType = fsType
	}
	return &volSource
}
// GetPersistentVolumeSource mirrors GetVolumeSource for PVs; the secret ref
// here is namespaced.
func (r *rbdDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	rv, ok := e2evolume.(*rbdVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to RBD test volume")
	f := rv.f
	ns := f.Namespace
	pvSource := v1.PersistentVolumeSource{
		RBD: &v1.RBDPersistentVolumeSource{
			CephMonitors: []string{rv.serverIP},
			RBDPool:      "rbd",
			RBDImage:     "foo",
			RadosUser:    "admin",
			SecretRef: &v1.SecretReference{
				Name:      rv.secret.Name,
				Namespace: ns.Name,
			},
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		pvSource.RBD.FSType = fsType
	}
	return &pvSource, nil
}
// PrepareTest needs no per-test setup; cleanup is a no-op.
func (r *rbdDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    r,
		Prefix:    "rbd",
		Framework: f,
	}, func() {}
}
// CreateVolume starts a Ceph RBD server pod plus its auth secret.
func (r *rbdDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	ns := f.Namespace
	c, serverPod, secret, serverIP := newRBDServer(cs, ns.Name)
	config.ServerConfig = &c
	return &rbdVolume{
		serverPod: serverPod,
		serverIP:  serverIP,
		secret:    secret,
		f:         f,
	}
}
// DeleteVolume tears down the server pod and its secret.
func (v *rbdVolume) DeleteVolume() {
	cleanUpVolumeServerWithSecret(v.f, v.serverPod, v.secret)
}
// Ceph
// cephFSDriver exercises the in-tree CephFS plugin. It reuses the RBD server
// pod (newRBDServer), which also serves CephFS.
type cephFSDriver struct {
	driverInfo storageframework.DriverInfo
}
// cephVolume tracks the server pod, its IP, and the auth secret.
type cephVolume struct {
	serverPod *v1.Pod
	serverIP  string
	secret    *v1.Secret
	f         *framework.Framework
}
// Compile-time interface checks.
var _ storageframework.TestDriver = &cephFSDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &cephFSDriver{}
var _ storageframework.InlineVolumeTestDriver = &cephFSDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &cephFSDriver{}
// InitCephFSDriver returns cephFSDriver that implements TestDriver interface
func InitCephFSDriver() storageframework.TestDriver {
	return &cephFSDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "ceph",
			InTreePluginName: "kubernetes.io/cephfs",
			FeatureTag:       "[Feature:Volumes][Serial]",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapExec:        true,
				storageframework.CapRWX:         true,
				storageframework.CapMultiPODs:   true,
			},
		},
	}
}
// GetDriverInfo returns this driver's static capability description.
func (c *cephFSDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &c.driverInfo
}
// SkipUnsupportedTest is a no-op for CephFS.
func (c *cephFSDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// GetVolumeSource builds an inline CephFS source against the test monitor.
func (c *cephFSDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	cv, ok := e2evolume.(*cephVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Ceph test volume")
	return &v1.VolumeSource{
		CephFS: &v1.CephFSVolumeSource{
			Monitors: []string{cv.serverIP + ":6789"},
			User:     "kube",
			SecretRef: &v1.LocalObjectReference{
				Name: cv.secret.Name,
			},
			ReadOnly: readOnly,
		},
	}
}
// GetPersistentVolumeSource mirrors GetVolumeSource for PVs; the secret ref
// here is namespaced.
func (c *cephFSDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	cv, ok := e2evolume.(*cephVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Ceph test volume")
	ns := cv.f.Namespace
	return &v1.PersistentVolumeSource{
		CephFS: &v1.CephFSPersistentVolumeSource{
			Monitors: []string{cv.serverIP + ":6789"},
			User:     "kube",
			SecretRef: &v1.SecretReference{
				Name:      cv.secret.Name,
				Namespace: ns.Name,
			},
			ReadOnly: readOnly,
		},
	}, nil
}
// PrepareTest needs no per-test setup; cleanup is a no-op.
func (c *cephFSDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    c,
		Prefix:    "cephfs",
		Framework: f,
	}, func() {}
}
// CreateVolume starts the shared Ceph server pod plus its auth secret.
func (c *cephFSDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	ns := f.Namespace
	cfg, serverPod, secret, serverIP := newRBDServer(cs, ns.Name)
	config.ServerConfig = &cfg
	return &cephVolume{
		serverPod: serverPod,
		serverIP:  serverIP,
		secret:    secret,
		f:         f,
	}
}
// DeleteVolume tears down the server pod and its secret.
func (v *cephVolume) DeleteVolume() {
	cleanUpVolumeServerWithSecret(v.f, v.serverPod, v.secret)
}
// Hostpath

// hostPathDriver exercises the in-tree hostPath plugin using a fixed /tmp
// path on the node; it only supports inline (non-PV) volumes.
type hostPathDriver struct {
	driverInfo storageframework.DriverInfo
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &hostPathDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &hostPathDriver{}
var _ storageframework.InlineVolumeTestDriver = &hostPathDriver{}

// InitHostPathDriver returns hostPathDriver that implements TestDriver interface
func InitHostPathDriver() storageframework.TestDriver {
	return &hostPathDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "hostPath",
			InTreePluginName: "kubernetes.io/host-path",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			// hostPath volumes are tied to one node, so topology is keyed
			// by hostname.
			TopologyKeys: []string{v1.LabelHostname},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence:      true,
				storageframework.CapMultiPODs:        true,
				storageframework.CapSingleNodeVolume: true,
				storageframework.CapTopology:         true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for the hostPath driver.
func (h *hostPathDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &h.driverInfo
}

// SkipUnsupportedTest is a no-op: hostPath does not skip any test pattern.
func (h *hostPathDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// GetVolumeSource returns an inline hostPath volume source rooted at /tmp,
// or nil when a read-only volume is requested, which hostPath does not
// support.
func (h *hostPathDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	if readOnly {
		// hostPath doesn't support readOnly volume
		return nil
	}
	hostPath := v1.HostPathVolumeSource{Path: "/tmp"}
	return &v1.VolumeSource{HostPath: &hostPath}
}
// PrepareTest returns the per-test configuration; no cleanup is required.
func (h *hostPathDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    h,
		Prefix:    "hostpath",
		Framework: f,
	}, func() {}
}

// CreateVolume pins the test pods to one ready node (the volume lives on a
// node's local /tmp) and returns nil because no volume object is needed.
func (h *hostPathDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	// pods should be scheduled on the node
	node, err := e2enode.GetRandomReadySchedulableNode(cs)
	framework.ExpectNoError(err)
	config.ClientNodeSelection = e2epod.NodeSelection{Name: node.Name}
	return nil
}
// HostPathSymlink

// hostPathSymlinkDriver exercises hostPath volumes whose path is a symlink
// to another directory on the node.
type hostPathSymlinkDriver struct {
	driverInfo storageframework.DriverInfo
}

// hostPathSymlinkVolume records the symlink target/source created on the
// node plus the prep pod spec, which is reused for teardown.
type hostPathSymlinkVolume struct {
	targetPath string // the symlink the volume points at
	sourcePath string // the real directory the symlink resolves to
	prepPod    *v1.Pod
	f          *framework.Framework
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &hostPathSymlinkDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &hostPathSymlinkDriver{}
var _ storageframework.InlineVolumeTestDriver = &hostPathSymlinkDriver{}

// InitHostPathSymlinkDriver returns hostPathSymlinkDriver that implements TestDriver interface
func InitHostPathSymlinkDriver() storageframework.TestDriver {
	return &hostPathSymlinkDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "hostPathSymlink",
			InTreePluginName: "kubernetes.io/host-path",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			TopologyKeys: []string{v1.LabelHostname},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence:      true,
				storageframework.CapMultiPODs:        true,
				storageframework.CapSingleNodeVolume: true,
				storageframework.CapTopology:         true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for the driver.
func (h *hostPathSymlinkDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &h.driverInfo
}

// SkipUnsupportedTest is a no-op: no test pattern is skipped.
func (h *hostPathSymlinkDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}

// GetVolumeSource returns an inline hostPath source pointing at the symlink
// created by CreateVolume, or nil for read-only requests (unsupported).
func (h *hostPathSymlinkDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	hv, ok := e2evolume.(*hostPathSymlinkVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Hostpath Symlink test volume")
	// hostPathSymlink doesn't support readOnly volume
	if readOnly {
		return nil
	}
	return &v1.VolumeSource{
		HostPath: &v1.HostPathVolumeSource{
			Path: hv.targetPath,
		},
	}
}

// PrepareTest returns the per-test configuration; no cleanup is required.
func (h *hostPathSymlinkDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    h,
		Prefix:    "hostpathsymlink",
		Framework: f,
	}, func() {}
}
// CreateVolume picks a ready node, pins the test pods to it, then runs a
// short-lived privileged pod on that node which creates the source
// directory under /tmp and symlinks it. The prep pod spec is kept in the
// returned volume so DeleteVolume can rerun it with a removal command.
func (h *hostPathSymlinkDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	cs := f.ClientSet
	// Namespace name is unique per test, so these paths will not collide.
	sourcePath := fmt.Sprintf("/tmp/%v", f.Namespace.Name)
	targetPath := fmt.Sprintf("/tmp/%v-link", f.Namespace.Name)
	volumeName := "test-volume"
	// pods should be scheduled on the node
	node, err := e2enode.GetRandomReadySchedulableNode(cs)
	framework.ExpectNoError(err)
	config.ClientNodeSelection = e2epod.NodeSelection{Name: node.Name}
	cmd := fmt.Sprintf("mkdir %v -m 777 && ln -s %v %v", sourcePath, sourcePath, targetPath)
	// Privileged so the container may manipulate the node's /tmp.
	privileged := true
	// Launch pod to initialize hostPath directory and symlink
	prepPod := &v1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name: fmt.Sprintf("hostpath-symlink-prep-%s", f.Namespace.Name),
		},
		Spec: v1.PodSpec{
			Containers: []v1.Container{
				{
					Name:    fmt.Sprintf("init-volume-%s", f.Namespace.Name),
					Image:   imageutils.GetE2EImage(imageutils.BusyBox),
					Command: []string{"/bin/sh", "-ec", cmd},
					VolumeMounts: []v1.VolumeMount{
						{
							Name:      volumeName,
							MountPath: "/tmp",
						},
					},
					SecurityContext: &v1.SecurityContext{
						Privileged: &privileged,
					},
				},
			},
			RestartPolicy: v1.RestartPolicyNever,
			Volumes: []v1.Volume{
				{
					Name: volumeName,
					VolumeSource: v1.VolumeSource{
						HostPath: &v1.HostPathVolumeSource{
							Path: "/tmp",
						},
					},
				},
			},
			// Must run on the same node the test pods are pinned to.
			NodeName: node.Name,
		},
	}
	// h.prepPod will be reused in cleanupDriver.
	pod, err := f.ClientSet.CoreV1().Pods(f.Namespace.Name).Create(context.TODO(), prepPod, metav1.CreateOptions{})
	framework.ExpectNoError(err, "while creating hostPath init pod")
	err = e2epod.WaitForPodSuccessInNamespaceTimeout(f.ClientSet, pod.Name, pod.Namespace, f.Timeouts.PodStart)
	framework.ExpectNoError(err, "while waiting for hostPath init pod to succeed")
	err = e2epod.DeletePodWithWait(f.ClientSet, pod)
	framework.ExpectNoError(err, "while deleting hostPath init pod")
	return &hostPathSymlinkVolume{
		sourcePath: sourcePath,
		targetPath: targetPath,
		prepPod:    prepPod,
		f:          f,
	}
}
// DeleteVolume reruns the prep pod (with its command swapped for a removal
// command) to delete the symlink and source directory from the node, then
// waits for it to succeed and removes the pod.
func (v *hostPathSymlinkVolume) DeleteVolume() {
	f := v.f
	cmd := fmt.Sprintf("rm -rf %v&& rm -rf %v", v.targetPath, v.sourcePath)
	// Mutates the cached prep pod spec in place before resubmitting it.
	v.prepPod.Spec.Containers[0].Command = []string{"/bin/sh", "-ec", cmd}
	pod, err := f.ClientSet.CoreV1().Pods(f.Namespace.Name).Create(context.TODO(), v.prepPod, metav1.CreateOptions{})
	framework.ExpectNoError(err, "while creating hostPath teardown pod")
	err = e2epod.WaitForPodSuccessInNamespaceTimeout(f.ClientSet, pod.Name, pod.Namespace, f.Timeouts.PodStart)
	framework.ExpectNoError(err, "while waiting for hostPath teardown pod to succeed")
	err = e2epod.DeletePodWithWait(f.ClientSet, pod)
	framework.ExpectNoError(err, "while deleting hostPath teardown pod")
}
// emptydir

// emptydirDriver exercises the emptyDir volume type; volumes are ephemeral
// and node-local, so no persistence capability is advertised.
type emptydirDriver struct {
	driverInfo storageframework.DriverInfo
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &emptydirDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &emptydirDriver{}
var _ storageframework.InlineVolumeTestDriver = &emptydirDriver{}

// InitEmptydirDriver returns emptydirDriver that implements TestDriver interface
func InitEmptydirDriver() storageframework.TestDriver {
	return &emptydirDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "emptydir",
			InTreePluginName: "kubernetes.io/empty-dir",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapExec:             true,
				storageframework.CapSingleNodeVolume: true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for emptyDir.
func (e *emptydirDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &e.driverInfo
}

// SkipUnsupportedTest is a no-op: no test pattern is skipped.
func (e *emptydirDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}

// GetVolumeSource returns an inline emptyDir source, or nil for read-only
// requests (unsupported).
func (e *emptydirDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	// emptydir doesn't support readOnly volume
	if readOnly {
		return nil
	}
	return &v1.VolumeSource{
		EmptyDir: &v1.EmptyDirVolumeSource{},
	}
}

// CreateVolume returns nil: emptyDir volumes are created implicitly by the
// kubelet when the pod starts, so nothing has to be provisioned here.
func (e *emptydirDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	return nil
}

// PrepareTest returns the per-test configuration; no cleanup is required.
func (e *emptydirDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    e,
		Prefix:    "emptydir",
		Framework: f,
	}, func() {}
}
// Cinder
// This driver assumes that OpenStack client tools are installed
// (/usr/bin/nova, /usr/bin/cinder and /usr/bin/keystone)
// and that the usual OpenStack authentication env. variables are set
// (OS_USERNAME, OS_PASSWORD, OS_TENANT_NAME at least).
type cinderDriver struct {
	driverInfo storageframework.DriverInfo
}

// cinderVolume identifies a pre-provisioned Cinder volume by display name
// and Cinder-assigned ID.
type cinderVolume struct {
	volumeName string
	volumeID   string
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &cinderDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &cinderDriver{}
var _ storageframework.InlineVolumeTestDriver = &cinderDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &cinderDriver{}
var _ storageframework.DynamicPVTestDriver = &cinderDriver{}

// InitCinderDriver returns cinderDriver that implements TestDriver interface
func InitCinderDriver() storageframework.TestDriver {
	return &cinderDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "cinder",
			InTreePluginName: "kubernetes.io/cinder",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
			),
			TopologyKeys: []string{v1.LabelFailureDomainBetaZone},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapFsGroup:     true,
				storageframework.CapExec:        true,
				storageframework.CapBlock:       true,
				// Cinder supports volume limits, but the test creates large
				// number of volumes and times out test suites.
				storageframework.CapVolumeLimits: false,
				storageframework.CapTopology:     true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for Cinder.
func (c *cinderDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &c.driverInfo
}

// SkipUnsupportedTest skips all tests unless running on OpenStack.
func (c *cinderDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessProviderIs("openstack")
}

// GetVolumeSource builds an inline Cinder volume source for the volume
// created by CreateVolume; fsType is only set when non-empty.
func (c *cinderDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	cv, ok := e2evolume.(*cinderVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Cinder test volume")
	volSource := v1.VolumeSource{
		Cinder: &v1.CinderVolumeSource{
			VolumeID: cv.volumeID,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		volSource.Cinder.FSType = fsType
	}
	return &volSource
}

// GetPersistentVolumeSource builds the PV-flavored Cinder source; no node
// affinity is required, so the second return value is always nil.
func (c *cinderDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	cv, ok := e2evolume.(*cinderVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Cinder test volume")
	pvSource := v1.PersistentVolumeSource{
		Cinder: &v1.CinderPersistentVolumeSource{
			VolumeID: cv.volumeID,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		pvSource.Cinder.FSType = fsType
	}
	return &pvSource, nil
}
// GetDynamicProvisionStorageClass returns a StorageClass that dynamically
// provisions Cinder volumes, optionally pinning the filesystem type.
func (c *cinderDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	params := map[string]string{}
	if fsType != "" {
		params["fsType"] = fsType
	}
	return storageframework.GetStorageClass(
		"kubernetes.io/cinder",
		params,
		nil, // default (immediate) volume binding mode
		config.Framework.Namespace.Name,
	)
}
// PrepareTest returns the per-test configuration; no cleanup is required.
func (c *cinderDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    c,
		Prefix:    "cinder",
		Framework: f,
	}, func() {}
}
// CreateVolume shells out to the `cinder` CLI to create a 1 GB volume named
// after the (unique) test namespace, then parses the volume ID out of the
// CLI's table output. Fails the test if the CLI errors or no ID is found.
func (c *cinderDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	ns := f.Namespace
	// We assume that namespace.Name is a random string
	volumeName := ns.Name
	ginkgo.By("creating a test Cinder volume")
	output, err := exec.Command("cinder", "create", "--display-name="+volumeName, "1").CombinedOutput()
	// string(output) — the redundant output[:] re-slice was dropped.
	outputString := string(output)
	framework.Logf("cinder output:\n%s", outputString)
	framework.ExpectNoError(err)
	volumeID := parseCinderVolumeID(outputString)
	framework.Logf("Volume ID: %s", volumeID)
	framework.ExpectNotEqual(volumeID, "")
	return &cinderVolume{
		volumeName: volumeName,
		volumeID:   volumeID,
	}
}

// parseCinderVolumeID extracts the value of the "id" row from the table
// printed by `cinder create`. Expected format:
// | attachments       | []                                   |
// | availability_zone | nova                                 |
// ...
// | id                | 1d6ff08f-5d1c-41a4-ad72-4ef872cae685 |
// Returns "" when no id row is present.
func parseCinderVolumeID(output string) string {
	for _, line := range strings.Split(output, "\n") {
		fields := strings.Fields(line)
		// An id row has exactly 5 whitespace-separated tokens:
		// "|", "id", "|", "<uuid>", "|".
		if len(fields) != 5 {
			continue
		}
		if fields[1] != "id" {
			continue
		}
		return fields[3]
	}
	return ""
}
// DeleteVolume retries `cinder delete` for up to two minutes — the in-tree
// plugin may still be detaching the volume when teardown starts. On timeout
// it dumps `cinder show` output for debugging and fails the test.
func (v *cinderVolume) DeleteVolume() {
	id := v.volumeID
	name := v.volumeName
	// Try to delete the volume for several seconds - it takes
	// a while for the plugin to detach it.
	var output []byte
	var err error
	timeout := time.Second * 120
	framework.Logf("Waiting up to %v for removal of cinder volume %s / %s", timeout, id, name)
	for start := time.Now(); time.Since(start) < timeout; time.Sleep(5 * time.Second) {
		output, err = exec.Command("cinder", "delete", id).CombinedOutput()
		if err == nil {
			framework.Logf("Cinder volume %s deleted", id)
			return
		}
		framework.Logf("Failed to delete volume %s / %s: %v\n%s", id, name, err, string(output))
	}
	// Timed out, try to get "cinder show <volume>" output for easier debugging
	showOutput, showErr := exec.Command("cinder", "show", id).CombinedOutput()
	if showErr != nil {
		framework.Logf("Failed to show volume %s / %s: %v\n%s", id, name, showErr, string(showOutput))
	} else {
		framework.Logf("Volume %s / %s:\n%s", id, name, string(showOutput))
	}
	// string(output) — dropped the redundant output[:] re-slice used elsewhere
	// in this function's logging.
	framework.Failf("Failed to delete pre-provisioned volume %s / %s: %v\n%s", id, name, err, string(output))
}
// GCE

// gcePdDriver exercises the in-tree GCE persistent-disk plugin.
type gcePdDriver struct {
	driverInfo storageframework.DriverInfo
}

// gcePdVolume identifies a pre-provisioned GCE PD by name.
type gcePdVolume struct {
	volumeName string
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &gcePdDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &gcePdDriver{}
var _ storageframework.InlineVolumeTestDriver = &gcePdDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &gcePdDriver{}
var _ storageframework.DynamicPVTestDriver = &gcePdDriver{}

// InitGcePdDriver returns gcePdDriver that implements TestDriver interface
func InitGcePdDriver() storageframework.TestDriver {
	supportedTypes := sets.NewString(
		"", // Default fsType
		"ext2",
		"ext3",
		"ext4",
		"xfs",
	)
	return &gcePdDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "gcepd",
			InTreePluginName: "kubernetes.io/gce-pd",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType:      supportedTypes,
			SupportedMountOption: sets.NewString("debug", "nouid32"),
			TopologyKeys:         []string{v1.LabelTopologyZone},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence:         true,
				storageframework.CapFsGroup:             true,
				storageframework.CapBlock:               true,
				storageframework.CapExec:                true,
				storageframework.CapMultiPODs:           true,
				storageframework.CapControllerExpansion: true,
				storageframework.CapOnlineExpansion:     true,
				storageframework.CapNodeExpansion:       true,
				// GCE supports volume limits, but the test creates large
				// number of volumes and times out test suites.
				storageframework.CapVolumeLimits: false,
				storageframework.CapTopology:     true,
			},
		},
	}
}
// InitWindowsGcePdDriver returns gcePdDriver running on Windows cluster that implements TestDriver interface
// In current test structure, it first initialize the driver and then set up
// the new framework, so we cannot get the correct OS here and select which file system is supported.
// So here uses a separate Windows in-tree gce pd driver
func InitWindowsGcePdDriver() storageframework.TestDriver {
supportedTypes := sets.NewString(
"ntfs",
)
return &gcePdDriver{
driverInfo: storageframework.DriverInfo{
Name: "windows-gcepd",
InTreePluginName: "kubernetes.io/gce-pd",
MaxFileSize: storageframework.FileSizeMedium,
SupportedSizeRange: e2evolume.SizeRange{
Min: "1Gi",
},
SupportedFsType: supportedTypes,
TopologyKeys: []string{v1.LabelZoneFailureDomain},
Capabilities: map[storageframework.Capability]bool{
storageframework.CapControllerExpansion: false,
storageframework.CapPersistence: true,
storageframework.CapExec: true,
storageframework.CapMultiPODs: true,
// GCE supports volume limits, but the test creates large
// number of volumes and times out test suites.
storageframework.CapVolumeLimits: false,
storageframework.CapTopology: true,
},
},
}<|fim▁hole|>}
// SkipUnsupportedTest restricts GCE PD tests to gce/gke providers; Windows
// feature-tagged patterns additionally require Windows nodes.
func (g *gcePdDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessProviderIs("gce", "gke")
	if pattern.FeatureTag == "[Feature:Windows]" {
		e2eskipper.SkipUnlessNodeOSDistroIs("windows")
	}
}

// GetVolumeSource builds an inline GCE PD volume source; fsType is only set
// when non-empty.
func (g *gcePdDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	gv, ok := e2evolume.(*gcePdVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to GCE PD test volume")
	volSource := v1.VolumeSource{
		GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{
			PDName:   gv.volumeName,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		volSource.GCEPersistentDisk.FSType = fsType
	}
	return &volSource
}

// GetPersistentVolumeSource builds the PV-flavored GCE PD source; no node
// affinity is required, so the second return value is always nil.
func (g *gcePdDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	gv, ok := e2evolume.(*gcePdVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to GCE PD test volume")
	pvSource := v1.PersistentVolumeSource{
		GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{
			PDName:   gv.volumeName,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		pvSource.GCEPersistentDisk.FSType = fsType
	}
	return &pvSource, nil
}

// GetDynamicProvisionStorageClass returns a StorageClass for the gce-pd
// provisioner with WaitForFirstConsumer binding.
func (g *gcePdDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	provisioner := "kubernetes.io/gce-pd"
	parameters := map[string]string{}
	if fsType != "" {
		parameters["fsType"] = fsType
	}
	ns := config.Framework.Namespace.Name
	delayedBinding := storagev1.VolumeBindingWaitForFirstConsumer
	return storageframework.GetStorageClass(provisioner, parameters, &delayedBinding, ns)
}

// PrepareTest returns the per-test configuration; on Windows clusters the
// client pods are additionally pinned to Windows nodes.
func (g *gcePdDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	config := &storageframework.PerTestConfig{
		Driver:    g,
		Prefix:    "gcepd",
		Framework: f,
	}
	if framework.NodeOSDistroIs("windows") {
		config.ClientNodeSelection = e2epod.NodeSelection{
			Selector: map[string]string{
				"kubernetes.io/os": "windows",
			},
		}
	}
	return config, func() {}
}
// CreateVolume provisions a GCE PD in the configured zone. For inline
// volumes the client pods are pinned to that zone, since inline PDs cannot
// be attached cross-zone.
func (g *gcePdDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	zone := getInlineVolumeZone(config.Framework)
	if volType == storageframework.InlineVolume {
		// PD will be created in framework.TestContext.CloudConfig.Zone zone,
		// so pods should be also scheduled there.
		config.ClientNodeSelection = e2epod.NodeSelection{
			Selector: map[string]string{
				v1.LabelFailureDomainBetaZone: zone,
			},
		}
	}
	ginkgo.By("creating a test gce pd volume")
	vname, err := e2epv.CreatePDWithRetryAndZone(zone)
	framework.ExpectNoError(err)
	return &gcePdVolume{
		volumeName: vname,
	}
}

// DeleteVolume deletes the PD, retrying while the cloud detaches it.
func (v *gcePdVolume) DeleteVolume() {
	e2epv.DeletePDWithRetry(v.volumeName)
}
// vSphere

// vSphereDriver exercises the in-tree vSphere volume plugin.
type vSphereDriver struct {
	driverInfo storageframework.DriverInfo
}

// vSphereVolume records the VMDK path plus the node it was created against,
// which is needed again at deletion time.
type vSphereVolume struct {
	volumePath string
	nodeInfo   *vspheretest.NodeInfo
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &vSphereDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &vSphereDriver{}
var _ storageframework.InlineVolumeTestDriver = &vSphereDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &vSphereDriver{}
var _ storageframework.DynamicPVTestDriver = &vSphereDriver{}

// InitVSphereDriver returns vSphereDriver that implements TestDriver interface
func InitVSphereDriver() storageframework.TestDriver {
	return &vSphereDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "vsphere",
			InTreePluginName: "kubernetes.io/vsphere-volume",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
				"ext4",
				"ntfs",
			),
			TopologyKeys: []string{v1.LabelFailureDomainBetaZone},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapFsGroup:     true,
				storageframework.CapExec:        true,
				storageframework.CapMultiPODs:   true,
				storageframework.CapTopology:    true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for vSphere.
func (v *vSphereDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &v.driverInfo
}

// SkipUnsupportedTest skips all tests unless running on vSphere.
func (v *vSphereDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessProviderIs("vsphere")
}

// GetVolumeSource builds an inline vSphere volume source, or returns nil
// for read-only requests (see TODO below).
func (v *vSphereDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	vsv, ok := e2evolume.(*vSphereVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to vSphere test volume")
	// vSphere driver doesn't seem to support readOnly volume
	// TODO: check if it is correct
	if readOnly {
		return nil
	}
	volSource := v1.VolumeSource{
		VsphereVolume: &v1.VsphereVirtualDiskVolumeSource{
			VolumePath: vsv.volumePath,
		},
	}
	if fsType != "" {
		volSource.VsphereVolume.FSType = fsType
	}
	return &volSource
}

// GetPersistentVolumeSource builds the PV-flavored vSphere source; nil for
// read-only requests, and never any node affinity.
func (v *vSphereDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	vsv, ok := e2evolume.(*vSphereVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to vSphere test volume")
	// vSphere driver doesn't seem to support readOnly volume
	// TODO: check if it is correct
	if readOnly {
		return nil, nil
	}
	pvSource := v1.PersistentVolumeSource{
		VsphereVolume: &v1.VsphereVirtualDiskVolumeSource{
			VolumePath: vsv.volumePath,
		},
	}
	if fsType != "" {
		pvSource.VsphereVolume.FSType = fsType
	}
	return &pvSource, nil
}

// GetDynamicProvisionStorageClass returns a StorageClass for the
// vsphere-volume provisioner with default (immediate) binding.
func (v *vSphereDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	provisioner := "kubernetes.io/vsphere-volume"
	parameters := map[string]string{}
	if fsType != "" {
		parameters["fsType"] = fsType
	}
	ns := config.Framework.Namespace.Name
	return storageframework.GetStorageClass(provisioner, parameters, nil, ns)
}

// PrepareTest returns the per-test configuration; no cleanup is required.
func (v *vSphereDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    v,
		Prefix:    "vsphere",
		Framework: f,
	}, func() {}
}
// CreateVolume bootstraps the vSphere test context, picks a random ready
// node and creates a VMDK in that node's datacenter.
func (v *vSphereDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	f := config.Framework
	vspheretest.Bootstrap(f)
	nodeInfo := vspheretest.GetReadySchedulableRandomNodeInfo()
	volumePath, err := nodeInfo.VSphere.CreateVolume(&vspheretest.VolumeOptions{}, nodeInfo.DataCenterRef)
	framework.ExpectNoError(err)
	return &vSphereVolume{
		volumePath: volumePath,
		nodeInfo:   nodeInfo,
	}
}

// DeleteVolume removes the VMDK from the datacenter it was created in.
func (v *vSphereVolume) DeleteVolume() {
	v.nodeInfo.VSphere.DeleteVolume(v.volumePath, v.nodeInfo.DataCenterRef)
}
// Azure Disk

// azureDiskDriver exercises the in-tree azure-disk plugin.
type azureDiskDriver struct {
	driverInfo storageframework.DriverInfo
}

// azureDiskVolume identifies a pre-provisioned Azure managed disk by its
// full URI (the disk name is derived from the last path segment).
type azureDiskVolume struct {
	volumeName string
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &azureDiskDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &azureDiskDriver{}
var _ storageframework.InlineVolumeTestDriver = &azureDiskDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &azureDiskDriver{}
var _ storageframework.DynamicPVTestDriver = &azureDiskDriver{}

// InitAzureDiskDriver returns azureDiskDriver that implements TestDriver interface
func InitAzureDiskDriver() storageframework.TestDriver {
	return &azureDiskDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "azure-disk",
			InTreePluginName: "kubernetes.io/azure-disk",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
				"ext4",
				"xfs",
			),
			TopologyKeys: []string{v1.LabelFailureDomainBetaZone},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence: true,
				storageframework.CapFsGroup:     true,
				storageframework.CapBlock:       true,
				storageframework.CapExec:        true,
				storageframework.CapMultiPODs:   true,
				// Azure supports volume limits, but the test creates large
				// number of volumes and times out test suites.
				storageframework.CapVolumeLimits: false,
				storageframework.CapTopology:     true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for azure-disk.
func (a *azureDiskDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &a.driverInfo
}

// SkipUnsupportedTest skips all tests unless running on Azure.
func (a *azureDiskDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessProviderIs("azure")
}

// GetVolumeSource builds an inline azure-disk source. The disk name is the
// last path segment of the disk URI stored in volumeName.
func (a *azureDiskDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	av, ok := e2evolume.(*azureDiskVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Azure test volume")
	diskName := av.volumeName[(strings.LastIndex(av.volumeName, "/") + 1):]
	kind := v1.AzureManagedDisk
	volSource := v1.VolumeSource{
		AzureDisk: &v1.AzureDiskVolumeSource{
			DiskName:    diskName,
			DataDiskURI: av.volumeName,
			Kind:        &kind,
			ReadOnly:    &readOnly,
		},
	}
	if fsType != "" {
		volSource.AzureDisk.FSType = &fsType
	}
	return &volSource
}

// GetPersistentVolumeSource builds the PV-flavored azure-disk source; no
// node affinity is required, so the second return value is always nil.
func (a *azureDiskDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	av, ok := e2evolume.(*azureDiskVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to Azure test volume")
	diskName := av.volumeName[(strings.LastIndex(av.volumeName, "/") + 1):]
	kind := v1.AzureManagedDisk
	pvSource := v1.PersistentVolumeSource{
		AzureDisk: &v1.AzureDiskVolumeSource{
			DiskName:    diskName,
			DataDiskURI: av.volumeName,
			Kind:        &kind,
			ReadOnly:    &readOnly,
		},
	}
	if fsType != "" {
		pvSource.AzureDisk.FSType = &fsType
	}
	return &pvSource, nil
}

// GetDynamicProvisionStorageClass returns a StorageClass for the azure-disk
// provisioner with WaitForFirstConsumer binding.
func (a *azureDiskDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	provisioner := "kubernetes.io/azure-disk"
	parameters := map[string]string{}
	if fsType != "" {
		parameters["fsType"] = fsType
	}
	ns := config.Framework.Namespace.Name
	delayedBinding := storagev1.VolumeBindingWaitForFirstConsumer
	return storageframework.GetStorageClass(provisioner, parameters, &delayedBinding, ns)
}

// PrepareTest returns the per-test configuration; no cleanup is required.
func (a *azureDiskDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	return &storageframework.PerTestConfig{
		Driver:    a,
		Prefix:    "azure",
		Framework: f,
	}, func() {}
}
// CreateVolume provisions an Azure managed disk in the configured zone. For
// inline volumes the client pods are pinned to that zone.
func (a *azureDiskDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	ginkgo.By("creating a test azure disk volume")
	zone := getInlineVolumeZone(config.Framework)
	if volType == storageframework.InlineVolume {
		// PD will be created in framework.TestContext.CloudConfig.Zone zone,
		// so pods should be also scheduled there.
		config.ClientNodeSelection = e2epod.NodeSelection{
			Selector: map[string]string{
				v1.LabelFailureDomainBetaZone: zone,
			},
		}
	}
	volumeName, err := e2epv.CreatePDWithRetryAndZone(zone)
	framework.ExpectNoError(err)
	return &azureDiskVolume{
		volumeName: volumeName,
	}
}

// DeleteVolume deletes the disk, retrying while the cloud detaches it.
func (v *azureDiskVolume) DeleteVolume() {
	e2epv.DeletePDWithRetry(v.volumeName)
}
// AWS

// awsDriver exercises the in-tree aws-ebs plugin.
type awsDriver struct {
	driverInfo storageframework.DriverInfo
}

// awsVolume identifies a pre-provisioned EBS volume by its volume ID.
type awsVolume struct {
	volumeName string
}

// Compile-time interface conformance checks.
var _ storageframework.TestDriver = &awsDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &awsDriver{}
var _ storageframework.InlineVolumeTestDriver = &awsDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &awsDriver{}
var _ storageframework.DynamicPVTestDriver = &awsDriver{}

// InitAwsDriver returns awsDriver that implements TestDriver interface
func InitAwsDriver() storageframework.TestDriver {
	return &awsDriver{
		driverInfo: storageframework.DriverInfo{
			Name:             "aws",
			InTreePluginName: "kubernetes.io/aws-ebs",
			MaxFileSize:      storageframework.FileSizeMedium,
			SupportedSizeRange: e2evolume.SizeRange{
				Min: "1Gi",
			},
			SupportedFsType: sets.NewString(
				"", // Default fsType
				"ext4",
				"xfs",
				"ntfs",
			),
			SupportedMountOption: sets.NewString("debug", "nouid32"),
			TopologyKeys:         []string{v1.LabelTopologyZone},
			Capabilities: map[storageframework.Capability]bool{
				storageframework.CapPersistence:         true,
				storageframework.CapFsGroup:             true,
				storageframework.CapBlock:               true,
				storageframework.CapExec:                true,
				storageframework.CapMultiPODs:           true,
				storageframework.CapControllerExpansion: true,
				storageframework.CapNodeExpansion:       true,
				storageframework.CapOnlineExpansion:     true,
				// AWS supports volume limits, but the test creates large
				// number of volumes and times out test suites.
				storageframework.CapVolumeLimits: false,
				storageframework.CapTopology:     true,
			},
		},
	}
}
// GetDriverInfo returns the static driver information for aws-ebs.
func (a *awsDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &a.driverInfo
}

// SkipUnsupportedTest skips all tests unless running on AWS.
func (a *awsDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
	e2eskipper.SkipUnlessProviderIs("aws")
}

// GetVolumeSource builds an inline EBS volume source; fsType is only set
// when non-empty.
func (a *awsDriver) GetVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) *v1.VolumeSource {
	av, ok := e2evolume.(*awsVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to AWS test volume")
	volSource := v1.VolumeSource{
		AWSElasticBlockStore: &v1.AWSElasticBlockStoreVolumeSource{
			VolumeID: av.volumeName,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		volSource.AWSElasticBlockStore.FSType = fsType
	}
	return &volSource
}

// GetPersistentVolumeSource builds the PV-flavored EBS source; no node
// affinity is required, so the second return value is always nil.
func (a *awsDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	av, ok := e2evolume.(*awsVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to AWS test volume")
	pvSource := v1.PersistentVolumeSource{
		AWSElasticBlockStore: &v1.AWSElasticBlockStoreVolumeSource{
			VolumeID: av.volumeName,
			ReadOnly: readOnly,
		},
	}
	if fsType != "" {
		pvSource.AWSElasticBlockStore.FSType = fsType
	}
	return &pvSource, nil
}

// GetDynamicProvisionStorageClass returns a StorageClass for the aws-ebs
// provisioner with WaitForFirstConsumer binding.
func (a *awsDriver) GetDynamicProvisionStorageClass(config *storageframework.PerTestConfig, fsType string) *storagev1.StorageClass {
	provisioner := "kubernetes.io/aws-ebs"
	parameters := map[string]string{}
	if fsType != "" {
		parameters["fsType"] = fsType
	}
	ns := config.Framework.Namespace.Name
	delayedBinding := storagev1.VolumeBindingWaitForFirstConsumer
	return storageframework.GetStorageClass(provisioner, parameters, &delayedBinding, ns)
}

// PrepareTest returns the per-test configuration; on Windows clusters the
// client pods are additionally pinned to Windows nodes.
func (a *awsDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	config := &storageframework.PerTestConfig{
		Driver:    a,
		Prefix:    "aws",
		Framework: f,
	}
	if framework.NodeOSDistroIs("windows") {
		config.ClientNodeSelection = e2epod.NodeSelection{
			Selector: map[string]string{
				"kubernetes.io/os": "windows",
			},
		}
	}
	return config, func() {}
}
// CreateVolume provisions an EBS volume in the configured zone. For inline
// volumes the client pods are pinned to that zone, since inline EBS volumes
// cannot be attached cross-zone.
func (a *awsDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	zone := getInlineVolumeZone(config.Framework)
	if volType == storageframework.InlineVolume {
		// PD will be created in framework.TestContext.CloudConfig.Zone zone,
		// so pods should be also scheduled there.
		config.ClientNodeSelection = e2epod.NodeSelection{
			Selector: map[string]string{
				v1.LabelTopologyZone: zone,
			},
		}
	}
	ginkgo.By("creating a test aws volume")
	vname, err := e2epv.CreatePDWithRetryAndZone(zone)
	framework.ExpectNoError(err)
	return &awsVolume{
		volumeName: vname,
	}
}

// DeleteVolume deletes the EBS volume, retrying while the cloud detaches it.
func (v *awsVolume) DeleteVolume() {
	e2epv.DeletePDWithRetry(v.volumeName)
}
// local
// localDriver implements the storage test driver interfaces on top of locally
// attached volumes (tmpfs, directory, block device, ...). The node, host
// executor and resource manager are populated in PrepareTest.
type localDriver struct {
	driverInfo storageframework.DriverInfo
	node       *v1.Node
	hostExec   utils.HostExec
	// volumeType represents local volume type we are testing, e.g. tmpfs,
	// directory, block device.
	volumeType utils.LocalVolumeType
	ltrMgr     utils.LocalTestResourceManager
}

// localVolume couples a created local test resource with the manager that is
// able to remove it again in DeleteVolume.
type localVolume struct {
	ltrMgr utils.LocalTestResourceManager
	ltr    *utils.LocalTestResource
}
var (
	// capabilities
	// Baseline capabilities for all local volume types; per-type overrides
	// live in localVolumeCapabitilies below (name keeps its historical typo).
	defaultLocalVolumeCapabilities = map[storageframework.Capability]bool{
		storageframework.CapPersistence:      true,
		storageframework.CapFsGroup:          true,
		storageframework.CapBlock:            false,
		storageframework.CapExec:             true,
		storageframework.CapMultiPODs:        true,
		storageframework.CapSingleNodeVolume: true,
	}
	localVolumeCapabitilies = map[utils.LocalVolumeType]map[storageframework.Capability]bool{
		utils.LocalVolumeBlock: {
			storageframework.CapPersistence:      true,
			storageframework.CapFsGroup:          true,
			storageframework.CapBlock:            true,
			storageframework.CapExec:             true,
			storageframework.CapMultiPODs:        true,
			storageframework.CapSingleNodeVolume: true,
		},
	}
	// fstype
	defaultLocalVolumeSupportedFsTypes = sets.NewString("")
	localVolumeSupportedFsTypes        = map[utils.LocalVolumeType]sets.String{
		utils.LocalVolumeBlock: sets.NewString(
			"", // Default fsType
			"ext4",
			//"xfs", disabled see issue https://github.com/kubernetes/kubernetes/issues/74095
		),
	}
	// max file size
	defaultLocalVolumeMaxFileSize = storageframework.FileSizeSmall
	localVolumeMaxFileSizes       = map[utils.LocalVolumeType]int64{}
)

// Compile-time assertions that localDriver satisfies the driver interfaces.
var _ storageframework.TestDriver = &localDriver{}
var _ storageframework.PreprovisionedVolumeTestDriver = &localDriver{}
var _ storageframework.PreprovisionedPVTestDriver = &localDriver{}
// InitLocalDriverWithVolumeType initializes the local driver based on the volume type.
// Max file size, supported fsTypes and capabilities fall back to the package
// defaults when the given type has no specific entry in the lookup maps.
func InitLocalDriverWithVolumeType(volumeType utils.LocalVolumeType) func() storageframework.TestDriver {
	maxFileSize := defaultLocalVolumeMaxFileSize
	if maxFileSizeByVolType, ok := localVolumeMaxFileSizes[volumeType]; ok {
		maxFileSize = maxFileSizeByVolType
	}
	supportedFsTypes := defaultLocalVolumeSupportedFsTypes
	if supportedFsTypesByType, ok := localVolumeSupportedFsTypes[volumeType]; ok {
		supportedFsTypes = supportedFsTypesByType
	}
	capabilities := defaultLocalVolumeCapabilities
	if capabilitiesByType, ok := localVolumeCapabitilies[volumeType]; ok {
		capabilities = capabilitiesByType
	}
	return func() storageframework.TestDriver {
		// custom tag to distinguish from tests of other volume types
		featureTag := fmt.Sprintf("[LocalVolumeType: %s]", volumeType)
		// For GCE Local SSD volumes, we must run serially
		if volumeType == utils.LocalVolumeGCELocalSSD {
			featureTag += " [Serial]"
		}
		return &localDriver{
			driverInfo: storageframework.DriverInfo{
				Name:             "local",
				InTreePluginName: "kubernetes.io/local-volume",
				FeatureTag:       featureTag,
				MaxFileSize:      maxFileSize,
				SupportedFsType:  supportedFsTypes,
				Capabilities:     capabilities,
			},
			volumeType: volumeType,
		}
	}
}
// GetDriverInfo returns the static driver metadata built at init time.
func (l *localDriver) GetDriverInfo() *storageframework.DriverInfo {
	return &l.driverInfo
}
// SkipUnsupportedTest is intentionally empty: no patterns are skipped here;
// supported combinations are expressed via the capability/fsType maps above.
func (l *localDriver) SkipUnsupportedTest(pattern storageframework.TestPattern) {
}
// PrepareTest picks a random ready schedulable node, sets up a host command
// executor plus a local resource manager rooted at /tmp, and pins all test
// pods to that node. For GCE local SSD it verifies at least one SSD is
// present on the node and skips otherwise. The returned cleanup func tears
// down the host executor.
func (l *localDriver) PrepareTest(f *framework.Framework) (*storageframework.PerTestConfig, func()) {
	var err error
	l.node, err = e2enode.GetRandomReadySchedulableNode(f.ClientSet)
	framework.ExpectNoError(err)
	l.hostExec = utils.NewHostExec(f)
	l.ltrMgr = utils.NewLocalResourceManager("local-driver", l.hostExec, "/tmp")
	// This can't be done in SkipUnsupportedTest because the test framework is not initialized yet
	if l.volumeType == utils.LocalVolumeGCELocalSSD {
		ssdInterface := "scsi"
		filesystemType := "fs"
		// Count SSD entries exposed under /mnt/disks/by-uuid on the node.
		ssdCmd := fmt.Sprintf("ls -1 /mnt/disks/by-uuid/google-local-ssds-%s-%s/ | wc -l", ssdInterface, filesystemType)
		res, err := l.hostExec.IssueCommandWithResult(ssdCmd, l.node)
		framework.ExpectNoError(err)
		num, err := strconv.Atoi(strings.TrimSpace(res))
		framework.ExpectNoError(err)
		if num < 1 {
			e2eskipper.Skipf("Requires at least 1 %s %s localSSD ", ssdInterface, filesystemType)
		}
	}
	return &storageframework.PerTestConfig{
		Driver:              l,
		Prefix:              "local",
		Framework:           f,
		ClientNodeSelection: e2epod.NodeSelection{Name: l.node.Name},
	}, func() {
		l.hostExec.Cleanup()
	}
}
// CreateVolume creates the backing local test resource on the node chosen in
// PrepareTest. Only PreprovisionedPV volumes are supported; any other volume
// type fails the test.
func (l *localDriver) CreateVolume(config *storageframework.PerTestConfig, volType storageframework.TestVolType) storageframework.TestVolume {
	switch volType {
	case storageframework.PreprovisionedPV:
		node := l.node
		// assign this to schedule pod on this node
		config.ClientNodeSelection = e2epod.NodeSelection{Name: node.Name}
		return &localVolume{
			ltrMgr: l.ltrMgr,
			ltr:    l.ltrMgr.Create(node, l.volumeType, nil),
		}
	default:
		framework.Failf("Unsupported volType: %v is specified", volType)
	}
	return nil
}
// DeleteVolume removes the local test resource via its resource manager.
func (v *localVolume) DeleteVolume() {
	v.ltrMgr.Remove(v.ltr)
}
// nodeAffinityForNode builds a VolumeNodeAffinity that restricts a PV to the
// given node via its kubernetes.io/hostname label. The test fails immediately
// if the node carries no labels or lacks the hostname label.
func (l *localDriver) nodeAffinityForNode(node *v1.Node) *v1.VolumeNodeAffinity {
	const hostnameKey = "kubernetes.io/hostname"
	if node.Labels == nil {
		framework.Failf("Node does not have labels")
	}
	hostname, found := node.Labels[hostnameKey]
	if !found {
		framework.Failf("Node does not have required label %q", hostnameKey)
	}
	// A single In-expression on the hostname label pins the PV to this node.
	hostnameTerm := v1.NodeSelectorTerm{
		MatchExpressions: []v1.NodeSelectorRequirement{{
			Key:      hostnameKey,
			Operator: v1.NodeSelectorOpIn,
			Values:   []string{hostname},
		}},
	}
	return &v1.VolumeNodeAffinity{
		Required: &v1.NodeSelector{
			NodeSelectorTerms: []v1.NodeSelectorTerm{hostnameTerm},
		},
	}
}
// GetPersistentVolumeSource returns a Local PV source for the given test
// volume plus node affinity pinning the PV to the node hosting the backing
// resource. The readOnly flag is not used for local volumes.
func (l *localDriver) GetPersistentVolumeSource(readOnly bool, fsType string, e2evolume storageframework.TestVolume) (*v1.PersistentVolumeSource, *v1.VolumeNodeAffinity) {
	lv, ok := e2evolume.(*localVolume)
	framework.ExpectEqual(ok, true, "Failed to cast test volume to local test volume")
	return &v1.PersistentVolumeSource{
		Local: &v1.LocalVolumeSource{
			Path:   lv.ltr.Path,
			FSType: &fsType,
		},
	}, l.nodeAffinityForNode(lv.ltr.Node)
}
// cleanUpVolumeServer is a wrapper of cleanup function for volume server without secret created by specific CreateStorageServer function.
// It simply delegates to cleanUpVolumeServerWithSecret with a nil secret.
func cleanUpVolumeServer(f *framework.Framework, serverPod *v1.Pod) {
	cleanUpVolumeServerWithSecret(f, serverPod, nil)
}
// getInlineVolumeZone returns the zone to use for inline test volumes: the
// configured cloud zone when set, otherwise the beta failure-domain zone label
// of a randomly chosen schedulable node, or "" when that label is absent.
func getInlineVolumeZone(f *framework.Framework) string {
	if framework.TestContext.CloudConfig.Zone != "" {
		return framework.TestContext.CloudConfig.Zone
	}
	// if zone is not specified we will randomly pick a zone from schedulable nodes for inline tests
	node, err := e2enode.GetRandomReadySchedulableNode(f.ClientSet)
	framework.ExpectNoError(err)
	zone, ok := node.Labels[v1.LabelFailureDomainBetaZone]
	if ok {
		return zone
	}
	return ""
}
// cleanUpVolumeServerWithSecret is a wrapper of cleanup function for volume server with secret created by specific CreateStorageServer function.
// Deletion failures are only logged; they do not fail the calling test.
func cleanUpVolumeServerWithSecret(f *framework.Framework, serverPod *v1.Pod, secret *v1.Secret) {
	cs := f.ClientSet
	ns := f.Namespace
	if secret != nil {
		framework.Logf("Deleting server secret %q...", secret.Name)
		err := cs.CoreV1().Secrets(ns.Name).Delete(context.TODO(), secret.Name, metav1.DeleteOptions{})
		if err != nil {
			framework.Logf("Delete secret failed: %v", err)
		}
	}
	framework.Logf("Deleting server pod %q...", serverPod.Name)
	err := e2epod.DeletePodWithWait(cs, serverPod)
	if err != nil {
		framework.Logf("Server pod delete failed: %v", err)
	}
}
|
}
func (g *gcePdDriver) GetDriverInfo() *storageframework.DriverInfo {
return &g.driverInfo
|
<|file_name|>ExternalShuffleClient.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.shuffle;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
import com.codahale.metrics.MetricSet;
import com.google.common.collect.Lists;
import org.apache.spark.network.client.RpcResponseCallback;
import org.apache.spark.network.shuffle.protocol.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;<|fim▁hole|>import org.apache.spark.network.client.TransportClientFactory;
import org.apache.spark.network.crypto.AuthClientBootstrap;
import org.apache.spark.network.sasl.SecretKeyHolder;
import org.apache.spark.network.server.NoOpRpcHandler;
import org.apache.spark.network.util.TransportConf;
/**
* Client for reading shuffle blocks which points to an external (outside of executor) server.
* This is instead of reading shuffle blocks directly from other executors (via
* BlockTransferService), which has the downside of losing the shuffle data if we lose the
* executors.
*/
public class ExternalShuffleClient extends ShuffleClient {
  private static final Logger logger = LoggerFactory.getLogger(ExternalShuffleClient.class);

  private final TransportConf conf;
  private final boolean authEnabled;
  private final SecretKeyHolder secretKeyHolder;
  private final long registrationTimeoutMs;

  // Created lazily in init(); appId doubles as the "initialized" flag.
  protected TransportClientFactory clientFactory;
  protected String appId;

  /**
   * Creates an external shuffle client, with SASL optionally enabled. If SASL is not enabled,
   * then secretKeyHolder may be null.
   */
  public ExternalShuffleClient(
      TransportConf conf,
      SecretKeyHolder secretKeyHolder,
      boolean authEnabled,
      long registrationTimeoutMs) {
    this.conf = conf;
    this.secretKeyHolder = secretKeyHolder;
    this.authEnabled = authEnabled;
    this.registrationTimeoutMs = registrationTimeoutMs;
  }

  /** Guards against use of the client before {@link #init(String)} has been called. */
  protected void checkInit() {
    assert appId != null : "Called before init()";
  }

  /**
   * Initializes the ShuffleClient, specifying this Executor's appId.
   * Must be called before any other method on the ShuffleClient.
   */
  public void init(String appId) {
    this.appId = appId;
    TransportContext context = new TransportContext(conf, new NoOpRpcHandler(), true, true);
    List<TransportClientBootstrap> bootstraps = Lists.newArrayList();
    if (authEnabled) {
      // Authenticate each new connection before it is used for fetches.
      bootstraps.add(new AuthClientBootstrap(conf, appId, secretKeyHolder));
    }
    clientFactory = context.createClientFactory(bootstraps);
  }

  /**
   * Fetches the given blocks from the external shuffle service at host:port for the given
   * executor, reporting each block to {@code listener}. When retries are configured, transient
   * fetch failures are retried; if setup itself fails, every requested block is reported failed.
   */
  @Override
  public void fetchBlocks(
      String host,
      int port,
      String execId,
      String[] blockIds,
      BlockFetchingListener listener,
      DownloadFileManager downloadFileManager) {
    checkInit();
    logger.debug("External shuffle fetch from {}:{} (executor id {})", host, port, execId);
    try {
      RetryingBlockFetcher.BlockFetchStarter blockFetchStarter =
          (blockIds1, listener1) -> {
            TransportClient client = clientFactory.createClient(host, port);
            new OneForOneBlockFetcher(client, appId, execId,
              blockIds1, listener1, conf, downloadFileManager).start();
          };

      int maxRetries = conf.maxIORetries();
      if (maxRetries > 0) {
        // Note this Fetcher will correctly handle maxRetries == 0; we avoid it just in case there's
        // a bug in this code. We should remove the if statement once we're sure of the stability.
        new RetryingBlockFetcher(conf, blockFetchStarter, blockIds, listener).start();
      } else {
        blockFetchStarter.createAndStart(blockIds, listener);
      }
    } catch (Exception e) {
      logger.error("Exception while beginning fetchBlocks", e);
      for (String blockId : blockIds) {
        listener.onBlockFetchFailure(blockId, e);
      }
    }
  }

  /** Returns the aggregated transport metrics of the underlying client factory. */
  @Override
  public MetricSet shuffleMetrics() {
    checkInit();
    return clientFactory.getAllMetrics();
  }

  /**
   * Registers this executor with an external shuffle server. This registration is required to
   * inform the shuffle server about where and how we store our shuffle files.
   *
   * @param host Host of shuffle server.
   * @param port Port of shuffle server.
   * @param execId This Executor's id.
   * @param executorInfo Contains all info necessary for the service to find our shuffle files.
   */
  public void registerWithShuffleServer(
      String host,
      int port,
      String execId,
      ExecutorShuffleInfo executorInfo) throws IOException, InterruptedException {
    checkInit();
    try (TransportClient client = clientFactory.createClient(host, port)) {
      ByteBuffer registerMessage = new RegisterExecutor(appId, execId, executorInfo).toByteBuffer();
      client.sendRpcSync(registerMessage, registrationTimeoutMs);
    }
  }

  /**
   * Asynchronously asks the external shuffle service to remove the given blocks for an executor.
   * The returned future completes with the number of removed blocks, or 0 when the RPC fails
   * or the response cannot be decoded; the transport client is closed in either case.
   */
  public Future<Integer> removeBlocks(
      String host,
      int port,
      String execId,
      String[] blockIds) throws IOException, InterruptedException {
    checkInit();
    CompletableFuture<Integer> numRemovedBlocksFuture = new CompletableFuture<>();
    ByteBuffer removeBlocksMessage = new RemoveBlocks(appId, execId, blockIds).toByteBuffer();
    final TransportClient client = clientFactory.createClient(host, port);
    client.sendRpc(removeBlocksMessage, new RpcResponseCallback() {
      @Override
      public void onSuccess(ByteBuffer response) {
        try {
          BlockTransferMessage msgObj = BlockTransferMessage.Decoder.fromByteBuffer(response);
          numRemovedBlocksFuture.complete(((BlocksRemoved) msgObj).numRemovedBlocks);
        } catch (Throwable t) {
          logger.warn("Error trying to remove RDD blocks " + Arrays.toString(blockIds) +
            " via external shuffle service from executor: " + execId, t);
          numRemovedBlocksFuture.complete(0);
        } finally {
          client.close();
        }
      }

      @Override
      public void onFailure(Throwable e) {
        logger.warn("Error trying to remove RDD blocks " + Arrays.toString(blockIds) +
          " via external shuffle service from executor: " + execId, e);
        numRemovedBlocksFuture.complete(0);
        client.close();
      }
    });
    return numRemovedBlocksFuture;
  }

  /** Shuts down the client factory (and its connections); safe to call once after init(). */
  @Override
  public void close() {
    checkInit();
    if (clientFactory != null) {
      clientFactory.close();
      clientFactory = null;
    }
  }
}
|
import org.apache.spark.network.TransportContext;
import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.client.TransportClientBootstrap;
|
<|file_name|>timepicker.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { Routes, RouterModule } from '@angular/router';
import { NgbdtimepickerBasic } from './timepicker.component';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
const routes: Routes = [{
path: '',
data: {
title: 'Timepicker',
urls: [{title: 'Dashboard', url: '/'},{title: 'ngComponent'},{title: 'Timepicker'}]
},
component: NgbdtimepickerBasic
}];
@NgModule({
imports: [
FormsModule,
CommonModule,
ReactiveFormsModule,<|fim▁hole|> ],
declarations: [NgbdtimepickerBasic]
})
export class TimepickerModule { }<|fim▁end|>
|
NgbModule.forRoot(),
RouterModule.forChild(routes)
|
<|file_name|>trigrams.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Trigrams assignment"""
import re
import random
test_fragment = """
One night--it was on the twentieth of March, 1888--I was
returning from a journey to a patient (for I had now returned to
civil practice), when my way led me through Baker Street. As I
passed the well-remembered door, which must always be associated
in my mind with my wooing, and with the dark incidents of the
Study in Scarlet, I was seized with a keen desire to see Holmes
again, and to know how he was employing his extraordinary powers.
His rooms were brilliantly lit, and, even as I looked up, I saw
his tall, spare figure pass twice in a dark silhouette against
the blind. He was pacing the room swiftly, eagerly, with his head
sunk upon his chest and his hands clasped behind him. To me, who
knew his every mood and habit, his attitude and manner told their
own story. He was at work again. He had risen out of his
drug-created dreams and was hot upon the scent of some new
problem. I rang the bell and was shown up to the chamber which
had formerly been in part my own.
"""
wee_test = "one night--it was on the twentieth of March, 1888--I was returning from a journey to a patient"
def generate_trigram_clean(string):
    """
    Generate a trigrams dictionary from a string, ignoring punctuation.

    Keys are two-word phrases ("w1 w2"); values are lists of every word that
    immediately follows that phrase. Punctuation tokens are dropped first, so
    n-grams may span punctuation boundaries, even sentence-ending periods.
    """
    bits = split_all(string)
    # Keep only purely alphabetic tokens (drops punctuation and numbers).
    bits = [bit for bit in bits if bit.isalpha()]
    trigrams = dict()
    for i in range(len(bits) - 2):
        key = " ".join(bits[i:i + 2])
        # setdefault handles both first occurrence and repeats in one call,
        # replacing the original try/except KeyError with duplicated appends.
        trigrams.setdefault(key, []).append(bits[i + 2])
    return trigrams
def generate_trigram(string):
    """
    Generate a trigrams dictionary from a string, retaining punctuation.

    Keys are verbatim two-token phrases (tokens keep adjacent non-whitespace
    punctuation); values are lists of the tokens that follow each phrase, and
    those may carry ending punctuation as well.
    """
    bits = string.split()
    trigrams = dict()
    for i in range(len(bits) - 2):
        key = " ".join(bits[i:i + 2])
        # setdefault replaces the original try/except KeyError with its
        # duplicated append calls.
        trigrams.setdefault(key, []).append(bits[i + 2])
    return trigrams
def translate_trigram(string, trigrams):
    """
    Translate a string using a trigram dict.

    Slides a two-word window over the text; whenever the window matches a key
    in ``trigrams``, the following word is replaced by a random choice from
    that key's alternatives. Replacements are made in place, so substituted
    words feed later windows. Keys are matched verbatim (punctuation included),
    and windows with no matching key leave the original word untouched.
    """
    bits = string.split()
    for i in range(len(bits) - 2):
        key = " ".join(bits[i:i + 2])
        # dict.get replaces the original broken try/except/else construct;
        # a missing or empty alternatives list means "no substitution".
        alternates = trigrams.get(key)
        if alternates:
            bits[i + 2] = random.choice(alternates)
    return " ".join(bits)
def read_file_as_string(filename):
    """Return the entire contents of ``filename`` as one string."""
    with open(filename) as source:
        return source.read()
def split_all(string, retain_whitespace=False):
    """
    Tokenize ``string`` into word and non-word pieces.

    Word runs stay whole; each punctuation character becomes its own token.
    Whitespace is dropped unless ``retain_whitespace`` is True.

    example
    >>>split_all("why, hello?!")
    ['why', ',', 'hello', '?', '!']
    """
    pattern = r"\w+|[^\w]" if retain_whitespace else r"\w+|[^\w\s]"
    return re.findall(pattern, string, re.UNICODE)
def update_dictlist(dict1, dict2):
    """Update dict1 in place with dict2's list values for keys dict1 lacks.

    Keys already present in dict1 are left untouched. New keys receive a
    *copy* of dict2's list so later mutations don't alias between the dicts.
    """
    # dict.has_key()/iteritems() are Python-2-only (has_key was removed in
    # Python 3); `in` and items() behave identically on both versions.
    for key, item in dict2.items():
        if key not in dict1:
            dict1[key] = list(item)
if __name__ == "__main__":
    # Using sherlock.txt and two methods of generating trigrams to translate test_fragment
    trigrams = generate_trigram(read_file_as_string('sherlock.txt'))
    trigrams2 = generate_trigram_clean(read_file_as_string('sherlock.txt'))
    # Merge the punctuation-free table into the verbatim one; existing keys win.
    update_dictlist(trigrams, trigrams2)
    translated_test_fragment = translate_trigram(test_fragment, trigrams)
    # print(...) is valid in both Python 2 (statement with parens) and 3;
    # the bare `print x` statement form is a SyntaxError under Python 3.
    print(translated_test_fragment)
|
except (NameError, KeyError):
pass
else:
|
<|file_name|>step-inspector.component.ts<|end_file_name|><|fim▁begin|>import {ChangeDetectorRef, Component, EventEmitter, Input, Output} from "@angular/core";
import {Workflow} from "cwl-svg";
import {StepModel, WorkflowModel} from "cwlts/models";
import {RawApp} from "../../../../../electron/src/sbg-api-client/interfaces/raw-app";
import {ErrorWrapper} from "../../../core/helpers/error-wrapper";
import {ErrorNotification, NotificationBarService} from "../../../layout/notification-bar/notification-bar.service";
import {StatusBarService} from "../../../layout/status-bar/status-bar.service";
import {PlatformRepositoryService} from "../../../repository/platform-repository.service";
import {ModalService} from "../../../ui/modal/modal.service";
import {DirectiveBase} from "../../../util/directive-base/directive-base";
import {UpdateStepModalComponent} from "../../update-step-modal/update-step-modal.component";
import {AppHelper} from "../../../core/helpers/AppHelper";
@Component({
selector: "ct-workflow-step-inspector",
styleUrls: ["./step-inspector.component.scss"],
template: `
<!--Update warning-->
<div class="alert alert-update form-control-label" *ngIf="step.hasUpdate">
A new version of this app is available!
<ng-container *ngIf="!readonly">
<button class="btn-unstyled p-0 update-entry" (click)="updateStep($event)">Update</button>
to get the latest changes.
</ng-container>
</div>
<!--View Modes-->
<ct-action-bar class="row workflow-step-inspector-tabs">
<ct-tab-selector class="full-width"
[distribute]="'equal'"
[active]="viewMode"
(activeChange)="changeTab($event)">
<ct-tab-selector-entry [tabName]="tabs.Info">
<span>App Info</span>
</ct-tab-selector-entry>
<ct-tab-selector-entry [tabName]="tabs.Inputs">
<span>Inputs</span>
</ct-tab-selector-entry>
<ct-tab-selector-entry [tabName]="tabs.Step">
<span>Step</span>
</ct-tab-selector-entry>
</ct-tab-selector>
</ct-action-bar>
<!--Inputs-->
<ct-workflow-step-inspector-inputs *ngIf="viewMode === tabs.Inputs"
[step]="step"
[inputs]="step.in"
[graph]="graph"
(change)="change.emit()"
[workflowModel]="workflowModel"
[readonly]="readonly">
</ct-workflow-step-inspector-inputs>
<!--Info-->
<ct-workflow-step-inspector-info *ngIf="viewMode === tabs.Info"
[step]="step">
</ct-workflow-step-inspector-info>
<!--Step-->
<ct-workflow-step-inspector-step *ngIf="viewMode === tabs.Step"
[step]="step"
[graph]="graph"
[workflowModel]="workflowModel"
(change)="change.emit()"
[readonly]="readonly">
</ct-workflow-step-inspector-step>
`
})
export class StepInspectorComponent extends DirectiveBase {
@Input()
readonly = false;
@Input()
step: StepModel;
@Input()
workflowModel: WorkflowModel;
@Input()
graph: Workflow;
@Input()
fileID: string;
@Output()
change = new EventEmitter();
tabs = {
Inputs: "inputs",
Info: "info",
Step: "step"
};
viewMode = this.tabs.Inputs;
constructor(private modal: ModalService,
private platformRepository: PlatformRepositoryService,
private cdr: ChangeDetectorRef,
private notificationBar: NotificationBarService,
private statusBar: StatusBarService) {
super();
}
updateStep(ev: Event) {
ev.preventDefault();
const appID = AppHelper.getAppIDWithRevision(this.step.run.customProps["sbg:id"], null);
const proc = this.statusBar.startProcess("Updating " + appID);
const modal = this.modal.fromComponent(UpdateStepModalComponent, {title: `Update ${appID}?`});
modal.step = this.step;
this.platformRepository.getApp(appID).then((app: RawApp) => {
this.statusBar.stopProcess(proc);
modal.updatedApp = app;<|fim▁hole|> modal.onSubmit = () => {
this.step.setRunProcess(app as any);
this.step.hasUpdate = false;
this.graph.redraw();
this.cdr.markForCheck();
this.cdr.detectChanges();
modal.closeModal();
};
}).catch(err => {
modal.closeModal();
this.statusBar.stopProcess(proc);
this.notificationBar.showNotification(new ErrorNotification(new ErrorWrapper(err).toString()));
});
}
changeTab(tab: string) {
this.viewMode = tab;
}
}<|fim▁end|>
|
modal.isLoading = false;
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ._compat import unittest
from ._adapt import DEFAULT_URI, drop, IS_MSSQL, IS_IMAP, IS_GAE, IS_TERADATA, IS_ORACLE
from pydal import DAL, Field
from pydal._compat import PY2
@unittest.skipIf(IS_IMAP, "Reference not Null unsupported on IMAP")
@unittest.skipIf(IS_ORACLE, "Reference Not Null unsupported on Oracle")
class TestReferenceNOTNULL(unittest.TestCase):
    # 1:N not null
    def testRun(self):
        # Exercise both 32-bit and 64-bit reference column types.
        for ref, bigint in [("reference", False), ("big-reference", True)]:
            db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint)
            # Skip big-reference on adapters that lack a "big-id" type.
            if bigint and "big-id" not in db._adapter.types:
                continue
            db.define_table("tt", Field("vv"))
            db.define_table(
                "ttt", Field("vv"), Field("tt_id", "%s tt" % ref, notnull=True)
            )
            # Inserting without tt_id must violate the NOT NULL constraint.
            self.assertRaises(Exception, db.ttt.insert, vv="pydal")
            # The following is mandatory for backends as PG to close the aborted transaction
            db.commit()
            drop(db.ttt)
            drop(db.tt)
            db.close()
@unittest.skipIf(IS_IMAP, "Reference Unique unsupported on IMAP")
@unittest.skipIf(IS_GAE, "Reference Unique unsupported on GAE")
@unittest.skipIf(IS_ORACLE, "Reference Unique unsupported on Oracle")
class TestReferenceUNIQUE(unittest.TestCase):
    # 1:1 relation
    def testRun(self):
        # Exercise both 32-bit and 64-bit reference column types.
        for ref, bigint in [("reference", False), ("big-reference", True)]:
            db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint)
            # Skip big-reference on adapters that lack a "big-id" type.
            if bigint and "big-id" not in db._adapter.types:
                continue
            db.define_table("tt", Field("vv"))
            db.define_table(
                "ttt",
                Field("vv"),
                Field("tt_id", "%s tt" % ref, unique=True),
                Field("tt_uq", "integer", unique=True),
            )
            id_1 = db.tt.insert(vv="pydal")
            id_2 = db.tt.insert(vv="pydal")
            # Null tt_id
            db.ttt.insert(vv="pydal", tt_uq=1)
            # first insert is OK
            db.ttt.insert(tt_id=id_1, tt_uq=2)
            # A duplicate on either unique column must be rejected.
            self.assertRaises(Exception, db.ttt.insert, tt_id=id_1, tt_uq=3)
            self.assertRaises(Exception, db.ttt.insert, tt_id=id_2, tt_uq=2)
            # The following is mandatory for backends as PG to close the aborted transaction
            db.commit()
            drop(db.ttt)
            drop(db.tt)
            db.close()
@unittest.skipIf(IS_IMAP, "Reference Unique not Null unsupported on IMAP")
@unittest.skipIf(IS_GAE, "Reference Unique not Null unsupported on GAE")
@unittest.skipIf(IS_ORACLE, "Reference Unique not Null unsupported on Oracle")
class TestReferenceUNIQUENotNull(unittest.TestCase):
    # 1:1 relation not null
    def testRun(self):
        # Exercise both 32-bit and 64-bit reference column types.
        for ref, bigint in [("reference", False), ("big-reference", True)]:
            db = DAL(DEFAULT_URI, check_reserved=["all"], bigint_id=bigint)
            # Skip big-reference on adapters that lack a "big-id" type.
            if bigint and "big-id" not in db._adapter.types:
                continue
            db.define_table("tt", Field("vv"))
            db.define_table(
                "ttt",
                Field("vv"),
                Field("tt_id", "%s tt" % ref, unique=True, notnull=True),
            )
            # Missing tt_id must violate NOT NULL.
            self.assertRaises(Exception, db.ttt.insert, vv="pydal")
            db.commit()
            id_i = db.tt.insert(vv="pydal")
            # first insert is OK
            db.ttt.insert(tt_id=id_i)
            # A second row with the same tt_id must violate UNIQUE.
            self.assertRaises(Exception, db.ttt.insert, tt_id=id_i)
            # The following is mandatory for backends as PG to close the aborted transaction
            db.commit()
            drop(db.ttt)
            drop(db.tt)
            db.close()
@unittest.skipIf(IS_IMAP, "Skip unicode on IMAP")
@unittest.skipIf(IS_MSSQL and not PY2, "Skip unicode on py3 and MSSQL")
class TestUnicode(unittest.TestCase):
    def testRun(self):
        # Round-trip a non-ASCII (Greek) string through insert and select.
        db = DAL(DEFAULT_URI, check_reserved=["all"])
        db.define_table("tt", Field("vv"))
        vv = "ἀγοραζε"
        id_i = db.tt.insert(vv=vv)
        row = db(db.tt.id == id_i).select().first()
        self.assertEqual(row.vv, vv)
        db.commit()
        drop(db.tt)
        db.close()
class TestParseDateTime(unittest.TestCase):
    def testRun(self):
        db = DAL(DEFAULT_URI, check_reserved=["all"])
        #: skip for adapters that use drivers for datetime parsing
        if db._adapter.parser.registered.get("datetime") is None:
            return
        parse = lambda v: db._adapter.parser.parse(v, "datetime", "datetime")
        # Plain ISO timestamp with microseconds.
        dt = parse("2015-09-04t12:33:36.223245")
        self.assertEqual(dt.microsecond, 223245)
        self.assertEqual(dt.hour, 12)
        # Trailing "Z" must not shift the time.
        dt = parse("2015-09-04t12:33:36.223245Z")
        self.assertEqual(dt.microsecond, 223245)
        self.assertEqual(dt.hour, 12)
        # "h:m"-style offsets are applied to the hour (12 -> 10 and 12 -> 13).
        dt = parse("2015-09-04t12:33:36.223245-2:0")
        self.assertEqual(dt.microsecond, 223245)
        self.assertEqual(dt.hour, 10)
        dt = parse("2015-09-04t12:33:36+1:0")
        self.assertEqual(dt.microsecond, 0)
        self.assertEqual(dt.hour, 13)
        # Fractional seconds are normalized to microseconds: padded when short,
        # truncated when longer than six digits.
        dt = parse("2015-09-04t12:33:36.123")
        self.assertEqual(dt.microsecond, 123000)
        dt = parse("2015-09-04t12:33:36.00123")
        self.assertEqual(dt.microsecond, 1230)
        dt = parse("2015-09-04t12:33:36.1234567890")
        self.assertEqual(dt.microsecond, 123456)
        db.close()
<|fim▁hole|>
@unittest.skipIf(IS_IMAP, "chained join unsupported on IMAP")
@unittest.skipIf(IS_TERADATA, "chained join unsupported on TERADATA")
class TestChainedJoinUNIQUE(unittest.TestCase):
    # 1:1 relation
    def testRun(self):
        # Build 3 parents (x, y, z), each with 3 children (u, v, w suffixes).
        db = DAL(DEFAULT_URI, check_reserved=["all"])
        db.define_table("aa", Field("name"))
        db.define_table("bb", Field("aa", "reference aa"), Field("name"))
        for k in ("x", "y", "z"):
            i = db.aa.insert(name=k)
            for j in ("u", "v", "w"):
                db.bb.insert(aa=i, name=k + j)
        db.commit()
        # Parent-side join: each aa row gains its ordered bb children.
        rows = db(db.aa).select()
        rows.join(db.bb.aa, fields=[db.bb.name], orderby=[db.bb.name])
        self.assertEqual(rows[0].bb[0].name, "xu")
        self.assertEqual(rows[0].bb[1].name, "xv")
        self.assertEqual(rows[0].bb[2].name, "xw")
        self.assertEqual(rows[1].bb[0].name, "yu")
        self.assertEqual(rows[1].bb[1].name, "yv")
        self.assertEqual(rows[1].bb[2].name, "yw")
        self.assertEqual(rows[2].bb[0].name, "zu")
        self.assertEqual(rows[2].bb[1].name, "zv")
        self.assertEqual(rows[2].bb[2].name, "zw")
        # Child-side join: each bb row gains its single aa parent.
        rows = db(db.bb).select()
        rows.join(db.aa.id, fields=[db.aa.name])
        self.assertEqual(rows[0].aa.name, "x")
        self.assertEqual(rows[1].aa.name, "x")
        self.assertEqual(rows[2].aa.name, "x")
        self.assertEqual(rows[3].aa.name, "y")
        self.assertEqual(rows[4].aa.name, "y")
        self.assertEqual(rows[5].aa.name, "y")
        self.assertEqual(rows[6].aa.name, "z")
        self.assertEqual(rows[7].aa.name, "z")
        self.assertEqual(rows[8].aa.name, "z")
        # Serialization of joined rows must not raise; value is not inspected.
        rows_json = rows.as_json()
        drop(db.bb)
        drop(db.aa)
        db.close()
class TestNullAdapter(unittest.TestCase):
    # Test that NullAdapter can define tables
    def testRun(self):
        # DAL(None) creates a connection-less DAL; defined tables must still
        # expose Field objects via both attribute and item access.
        db = DAL(None)
        db.define_table("no_table", Field("aa"))
        self.assertIsInstance(db.no_table.aa, Field)
        self.assertIsInstance(db.no_table["aa"], Field)
        db.close()
| |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>/*global module:false*/
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
meta: {
version: '2.0.0',
banner: '/*! Ebla - v<%= meta.version %> - ' +
'<%= grunt.template.today("yyyy-mm-dd") %>\n' +
'* Copyright (c) <%= grunt.template.today("yyyy") %> ' +
'Monospaced */'
},
concat: {
dist: {
src: ['<banner:meta.banner>',
'javascripts/libs/jquery.cookie.js',
'javascripts/ebla/ebla.js',
'javascripts/ebla/debug.js',
'javascripts/ebla/flash-message.js',
'javascripts/ebla/compatibility.js',
'javascripts/ebla/elements.js',
'javascripts/ebla/controls.js',
'javascripts/ebla/data.js',
'javascripts/ebla/images.js',
'javascripts/ebla/layout.js',<|fim▁hole|> 'javascripts/ebla/navigation.js',
'javascripts/ebla/navigation.event.js',
'javascripts/ebla/navigation.keyboard.js',
'javascripts/ebla/placesaver.js',
'javascripts/ebla/progress.js',
'javascripts/ebla/resize.js',
'javascripts/ebla/toc.js',
'javascripts/ebla/init.js',
'javascripts/ebla/book.js'],
dest: 'javascripts/ebla.js'
}
},
uglify: {
dist: {
src: ['<banner:meta.banner>', 'javascripts/ebla.js'],
dest: 'javascripts/ebla.min.js'
}
},
sass: {
dist: {
options: {
style: 'compressed'
},
files: {
'stylesheets/ebla.css': 'stylesheets/ebla.scss',
'stylesheets/book.css': 'stylesheets/book.scss'
}
}
},
watch: {
files: ['javascripts/ebla/*.js', 'stylesheets/*.scss'],
tasks: ['sass', 'concat', 'uglify']
},
jshint: {
files: ['Gruntfile.js', 'javascripts/ebla.js'],
options: {
curly: true,
eqeqeq: true,
immed: true,
latedef: true,
newcap: true,
noarg: true,
sub: true,
undef: true,
boss: true,
eqnull: true,
browser: true,
jquery: true,
devel: true,
globals: {
Modernizr: true,
debug: true,
bookData: true,
bookJson: true
}
},
}
});
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-sass');
// Default task.
grunt.registerTask('default', ['sass', 'concat', 'jshint', 'uglify']);
};<|fim▁end|>
|
'javascripts/ebla/loader.js',
|
<|file_name|>Venue.java<|end_file_name|><|fim▁begin|>package org.jboss.examples.ticketmonster.model;
import static javax.persistence.CascadeType.ALL;
import static javax.persistence.FetchType.EAGER;
import static javax.persistence.GenerationType.IDENTITY;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import org.hibernate.validator.constraints.NotEmpty;
/**
* <p>
* Represents a single venue
* </p>
*
* @author Shane Bryzak
* @author Pete Muir
*/
/*
* We suppress the warning about not specifying a serialVersionUID, as we are still developing this app, and want the JVM to
* generate the serialVersionUID for us. When we put this app into production, we'll generate and embed the serialVersionUID
*/
@SuppressWarnings("serial")
@Entity
public class Venue implements Serializable {
/* Declaration of fields */
/**
* The synthetic id of the object.
*/
@Id
@GeneratedValue(strategy = IDENTITY)
private Long id;
/**
* <p>
* The name of the event.
* </p>
*
* <p>
* The name of the event forms it's natural identity and cannot be shared between events.
* </p>
*
* <p>
* The name must not be null and must be one or more characters, the Bean Validation constrain <code>@NotEmpty</code>
* enforces this.
* </p>
*/
@Column(unique = true)
@NotEmpty
private String name;
/**
* The address of the venue
*/
private Address address = new Address();
/**
* A description of the venue
*/
private String description;
/**
* <p><|fim▁hole|> * The <code>@OneToMany<code> JPA mapping establishes this relationship. TODO Explain EAGER fetch.
* This relationship is bi-directional (a section knows which venue it is part of), and the <code>mappedBy</code>
* attribute establishes this. We cascade all persistence operations to the set of performances, so, for example if a venue
* is removed, then all of it's sections will also be removed.
* </p>
*/
@OneToMany(cascade = ALL, fetch = EAGER, mappedBy = "venue")
private Set<Section> sections = new HashSet<Section>();
/**
* The capacity of the venue
*/
private int capacity;
/**
* An optional media item to entice punters to the venue. The <code>@ManyToOne</code> establishes the relationship.
*/
@ManyToOne
private MediaItem mediaItem;
/* Boilerplate getters and setters */
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
public MediaItem getMediaItem() {
return mediaItem;
}
public void setMediaItem(MediaItem description) {
this.mediaItem = description;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set<Section> getSections() {
return sections;
}
public void setSections(Set<Section> sections) {
this.sections = sections;
}
public int getCapacity() {
return capacity;
}
public void setCapacity(int capacity) {
this.capacity = capacity;
}
/* toString(), equals() and hashCode() for Venue, using the natural identity of the object */
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Venue venue = (Venue) o;
if (address != null ? !address.equals(venue.address) : venue.address != null)
return false;
if (name != null ? !name.equals(venue.name) : venue.name != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (address != null ? address.hashCode() : 0);
return result;
}
@Override
public String toString() {
return name;
}
}<|fim▁end|>
|
* A set of sections in the venue
* </p>
*
* <p>
|
<|file_name|>counts_table_sparse.py<|end_file_name|><|fim▁begin|># vim: fdm=indent
# author: Fabio Zanini
# date: 09/08/17
# content: Sparse table of gene counts
# Modules
import numpy as np
import pandas as pd
# Classes / functions
class CountsTableSparse(pd.SparseDataFrame):
    '''Sparse table of gene expression counts

    - Rows are features, e.g. genes.
    - Columns are samples.
    '''

    # Custom attributes listed in pandas' _metadata so pandas propagates
    # them onto objects derived from this one (slices, results of ops).
    _metadata = [
            'name',
            '_spikeins',
            '_otherfeatures',
            '_normalized',
            'pseudocount',
            'dataset',
    ]

    # Feature names that are spike-in controls (filled by the constructors).
    _spikeins = ()
    # Feature names of other non-gene rows, e.g. unmapped-read counters.
    _otherfeatures = ()
    # False, or the name of the normalization method applied to this table.
    _normalized = False
    # Additive offset used by log()/unlog() so zero counts stay finite.
    pseudocount = 0.1
    # Back-reference to the owning Dataset, if any.
    dataset = None
    @property
    def _constructor(self):
        # pandas subclassing hook: makes slicing/arithmetic return a
        # CountsTableSparse instead of a plain sparse DataFrame.
        return CountsTableSparse
    @classmethod
    def from_tablename(cls, tablename):
        '''Instantiate a CountsTableSparse from its name in the config file.

        Args:
            tablename (string): name of the counts table in the config file.

        Returns:
            CountsTableSparse: the counts table.
        '''
        from ..config import config
        from ..io import parse_counts_table_sparse

        # Parse the raw table, then annotate it with the metadata declared
        # for this table in the configuration.
        self = cls(parse_counts_table_sparse({'countsname': tablename}))
        self.name = tablename
        config_table = config['io']['count_tables'][tablename]
        self._spikeins = config_table.get('spikeins', [])
        self._otherfeatures = config_table.get('other', [])
        self._normalized = config_table['normalized']
        return self
    @classmethod
    def from_datasetname(cls, datasetname):
        '''Instantiate a CountsTableSparse from a dataset name in the config file.

        Args:
            datasetname (string): name of the dataset in the config file.

        Returns:
            CountsTableSparse: the counts table.
        '''
        from ..config import config
        from ..io import parse_counts_table_sparse

        # Same as from_tablename, but the table metadata lives under the
        # dataset entry of the configuration.
        self = cls(parse_counts_table_sparse({'datasetname': datasetname}))
        self.name = datasetname
        config_table = config['io']['datasets'][datasetname]['counts_table']
        self._spikeins = config_table.get('spikeins', [])
        self._otherfeatures = config_table.get('other', [])
        self._normalized = config_table['normalized']
        return self
    def to_npz(self, filename):
        '''Save to numpy compressed file format.

        Args:
            filename (string): path of the output .npz file.
        '''
        from .io.npz import to_counts_table_sparse
        to_counts_table_sparse(self, filename)
    def exclude_features(self, spikeins=True, other=True, inplace=False,
                         errors='raise'):
        '''Get a slice that excludes secondary features.

        Args:
            spikeins (bool): Whether to exclude spike-ins
            other (bool): Whether to exclude other features, e.g. unmapped reads
            inplace (bool): Whether to drop those features in place.
            errors (string): Whether to raise an exception if the features
                to be excluded are already not present. Must be 'ignore'
                or 'raise'.

        Returns:
            CountsTable: a slice of self without those features.
        '''
        drop = []
        if spikeins:
            drop.extend(self._spikeins)
        if other:
            drop.extend(self._otherfeatures)
        out = self.drop(drop, axis=0, inplace=inplace, errors=errors)
        # When dropping in place, keep the linked dataset's feature sheet
        # in sync so counts and feature annotations never diverge.
        if inplace and (self.dataset is not None):
            self.dataset._featuresheet.drop(drop, inplace=True, errors=errors)
        return out
def get_spikeins(self):
'''Get spike-in features
Returns:
CountsTable: a slice of self with only spike-ins.
'''
return self.loc[self._spikeins]
def get_other_features(self):
'''Get other features
Returns:
CountsTable: a slice of self with only other features (e.g.
unmapped).
'''
return self.loc[self._otherfeatures]
    def log(self, base=10):
        '''Take the pseudocounted log of the counts.

        Args:
            base (float): Base of the log transform

        Returns:
            A transformed CountsTableSparse with zeros at the zero-count items.
        '''
        from scipy.sparse import coo_matrix

        coo = self.to_coo()
        # log(pseudocount) placed at every explicitly stored position, so the
        # shift below only touches stored entries and sparsity is preserved.
        coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
        # log1p(x / p) + log(p) == log(x + p) elementwise at stored entries.
        coolog = ((coo / self.pseudocount).log1p() + coobase) / np.log(base)
        # NOTE: the entries that should be log(pseudocount) are zeros now
        clog = CountsTableSparse(
            coolog,
            index=self.index,
            columns=self.columns,
            dtype=float,
            default_fill_value=0)
        return clog
def unlog(self, base=10):
'''Reverse the pseudocounted log of the counts.
Args:
base (float): Base of the log transform
Returns:
A transformed CountsTableSparse.
'''
from scipy.sparse import coo_matrix
coo = self.to_coo()
coobase = np.log(self.pseudocount) * coo_matrix((np.ones(coo.nnz), (coo.row, coo.col)), shape=coo.shape)
cooexp = (coo * np.log(base) - coobase).expm1() * self.pseudocount
cexp = CountsTableSparse(
cooexp,<|fim▁hole|> index=self.index,
columns=self.columns,
dtype=float,
default_fill_value=0)
return cexp
    def normalize(
            self,
            method='counts_per_million',
            include_spikeins=False,
            **kwargs):
        '''Normalize counts and return a new CountsTableSparse.

        Args:
            method (string or function): The method to use for normalization.
                One of 'counts_per_million', 'counts_per_thousand_spikeins',
                'counts_per_thousand_features', 'counts_per_million_column'.
                If this argument is a function, it must take this
                CountsTableSparse as input and return the normalized table
                as output.
            include_spikeins (bool): Whether to include spike-ins in the
                normalization and result.

        Returns:
            A new, normalized CountsTableSparse.

        NOTE: if method == 'counts_per_million_column', you have to use an
        additional keyword argument called 'column' that specifies the column
        of the samplesheet containing the normalization baseline. For instance,
        if your samplesheet has a column called 'total_counts' that you want to
        use for normalization, call:

            CountsTableSparse.normalize(
                method='counts_per_million_column',
                column='total_counts')

        This requires the count table to be linked to a Dataset.
        '''
        import copy

        if method == 'counts_per_million':
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = counts.sum(axis=0)
            counts_norm = 1e6 * counts / norm
        elif method == 'counts_per_thousand_spikeins':
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.get_spikeins().sum(axis=0)
            counts_norm = 1e3 * counts / norm
        elif method == 'counts_per_thousand_features':
            if 'features' not in kwargs:
                raise ValueError('Set features=<list of normalization features>')
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.loc[kwargs['features']].sum(axis=0)
            counts_norm = 1e3 * counts / norm
        elif method == 'counts_per_million_column':
            if 'column' not in kwargs:
                raise ValueError('Specify a samplesheet column with column=<mycolumn>')
            counts = self.exclude_features(spikeins=(not include_spikeins), other=True)
            norm = self.dataset[kwargs['column']].values
            counts_norm = 1e6 * counts / norm
        elif callable(method):
            counts_norm = method(self)
            method = 'custom'
        else:
            raise ValueError('Method not understood')

        # Shallow copy of metadata
        for prop in self._metadata:
            # dataset is special, to avoid infinite loops
            if prop == 'dataset':
                counts_norm.dataset = None
            else:
                setattr(counts_norm, prop, copy.copy(getattr(self, prop)))
        counts_norm._normalized = method
        return counts_norm
def get_statistics(self, axis='features', metrics=('mean', 'cv')):
'''Get statistics of the counts.
Args:
axis (str): 'features' or 'samples'
metrics (sequence of strings): any of 'mean', 'var', 'std', 'cv',
'fano', 'min', 'max'.
Returns:
pandas.DataFrame with features as rows and metrics as columns.
'''
if axis == 'features':
axn = 1
elif axis == 'samples':
axn = 0
else:
raise ValueError('axis must be features or samples')
st = {}
if 'mean' in metrics or 'cv' in metrics or 'fano' in metrics:
st['mean'] = self.mean(axis=axn)
if ('std' in metrics or 'cv' in metrics or 'fano' in metrics or
'var' in metrics):
st['std'] = self.std(axis=axn)
if 'var' in metrics:
st['var'] = st['std'] ** 2
if 'cv' in metrics:
st['cv'] = st['std'] / np.maximum(st['mean'], 1e-10)
if 'fano' in metrics:
st['fano'] = st['std'] ** 2 / np.maximum(st['mean'], 1e-10)
if 'min' in metrics:
st['min'] = self.min(axis=axn)
if 'max' in metrics:
st['max'] = self.max(axis=axn)
df = pd.concat([st[m] for m in metrics], axis=1)
df.columns = pd.Index(list(metrics), name='metrics')
return df<|fim▁end|>
| |
<|file_name|>comments.ts<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import {
Article,
Comment,
ICommentInstance, logger,
postProcessComment,
triggerMapFirst,
} from '@conversationai/moderator-backend-core';
import { enqueue, ISendCommentForScoringTaskData } from '@conversationai/moderator-backend-queue';
import * as Bluebird from 'bluebird';
/**
* Take a comment defintion and return a promise that either resolves
* with the model instance on succesful insertion or a rejection if the
* comment already exists.
*/
/**
 * Create a comment record unless one with the same sourceId already exists.
 * Pulls the owning article on demand when it is not yet in the database.
 *
 * @param commentData publisher-supplied comment payload.
 * @returns the (found or created) comment instance.
 * @throws Error when the referenced article cannot be pulled.
 */
async function createCommentIfNew(commentData: any): Promise<ICommentInstance> {
  // Verify article existence
  let article = await Article.findOne({
    where: {
      sourceId: commentData.articleId,
    },
  });

  if (!article) {
    logger.info(`Article id ${commentData.articleId} doesn't exist, pulling it.`);

    article = await triggerMapFirst('api.publisher.pullArticle', {
      articleId: commentData.articleId,
    });

    if (!article) {
      throw new Error(`Attempted to pull article ${commentData.articleId}, but it failed`);
    }
  }

  // Force convert publisher data to conform to DB model
  commentData.articleId = article.get('id');
  // NOTE(review): the next three assignments are no-ops (self-assignments);
  // presumably they once renamed publisher fields — confirm before removing.
  commentData.sourceId = commentData.sourceId;
  commentData.replyToSourceId = commentData.replyToSourceId;
  commentData.authorSourceId = commentData.authorSourceId;
  commentData.sourceCreatedAt = commentData.createdAt;

  logger.info(`Find or create comment ${commentData.sourceId}`, commentData);

  // If article exists, find/create comment
  const [instance, created] = await Comment.findOrCreate({
    where: {
      sourceId: commentData.sourceId,
    },

    defaults: commentData,
  });

  if (created) {
    logger.info(`Created comment ${instance.get('id')}`);
  } else {
    // Duplicate: skip post-processing, return the existing record untouched.
    logger.info(`Found comment ${instance.get('id')}, not creating new record`);

    return instance;
  }

  await postProcessComment(instance);

  return instance;
}
/**<|fim▁hole|> return Bluebird.mapSeries(items, createCommentIfNew)
.then((createdComments) => Promise.resolve(createdComments));
}
/**
 * Send the comments to the queue for scoring.
 *
 * Comments are enqueued one at a time (each enqueue is awaited); when
 * `runImmediately` is true the task runs right away instead of queuing.
 */
export async function sendCommentsToScoringQueue(comments: Array<ICommentInstance>, runImmediately = false): Promise<void> {
  for (const c of comments) {
    await enqueue<ISendCommentForScoringTaskData>('sendCommentForScoring', {
      commentId: c.get('id'),
    }, runImmediately);
  }
}
|
* Given an array of comment data, return instances succesfully
* created that weren't duplicates.
*/
export function createComments(items: Array<any>): Bluebird<Array<ICommentInstance>> {
|
<|file_name|>Hex2OctExpression.java<|end_file_name|><|fim▁begin|>/******************************************************************************
* Copyright (c) 2000-2015 Ericsson Telecom AB
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
******************************************************************************/
package org.eclipse.titan.designer.AST.TTCN3.values.expressions;
import java.util.List;
import org.eclipse.titan.designer.AST.ASTVisitor;
import org.eclipse.titan.designer.AST.INamedNode;
import org.eclipse.titan.designer.AST.IReferenceChain;
import org.eclipse.titan.designer.AST.IValue;
import org.eclipse.titan.designer.AST.ReferenceFinder;
import org.eclipse.titan.designer.AST.Scope;
import org.eclipse.titan.designer.AST.Value;
import org.eclipse.titan.designer.AST.IType.Type_type;
import org.eclipse.titan.designer.AST.ReferenceFinder.Hit;
import org.eclipse.titan.designer.AST.TTCN3.Expected_Value_type;
import org.eclipse.titan.designer.AST.TTCN3.values.Expression_Value;
import org.eclipse.titan.designer.AST.TTCN3.values.Hexstring_Value;
import org.eclipse.titan.designer.AST.TTCN3.values.Octetstring_Value;
import org.eclipse.titan.designer.parsers.CompilationTimeStamp;
import org.eclipse.titan.designer.parsers.ttcn3parser.ReParseException;
import org.eclipse.titan.designer.parsers.ttcn3parser.TTCN3ReparseUpdater;
/**
* @author Kristof Szabados
* */
public final class Hex2OctExpression extends Expression_Value {
	/** Error reported when the operand is not a hexstring value. */
	private static final String OPERANDERROR = "The operand of the `hex2oct' operation should be a hexstring value";

	/** The single operand of the `hex2oct' operation; may be null after parse errors. */
	private final Value value;

	public Hex2OctExpression(final Value value) {
		this.value = value;

		if (value != null) {
			value.setFullNameParent(this);
		}
	}
@Override
public Operation_type getOperationType() {
return Operation_type.HEX2OCT_OPERATION;
}<|fim▁hole|> final StringBuilder builder = new StringBuilder();
builder.append("hex2oct(").append(value.createStringRepresentation()).append(')');
return builder.toString();
}
@Override
public void setMyScope(final Scope scope) {
super.setMyScope(scope);
if (value != null) {
value.setMyScope(scope);
}
}
@Override
public StringBuilder getFullName(final INamedNode child) {
final StringBuilder builder = super.getFullName(child);
if (value == child) {
return builder.append(OPERAND);
}
return builder;
}
@Override
public Type_type getExpressionReturntype(final CompilationTimeStamp timestamp, final Expected_Value_type expectedValue) {
return Type_type.TYPE_OCTETSTRING;
}
@Override
public boolean isUnfoldable(final CompilationTimeStamp timestamp, final Expected_Value_type expectedValue,
final IReferenceChain referenceChain) {
if (value == null) {
return true;
}
return value.isUnfoldable(timestamp, expectedValue, referenceChain);
}
/**
* Checks the parameters of the expression and if they are valid in
* their position in the expression or not.
*
* @param timestamp
* the timestamp of the actual semantic check cycle.
* @param expectedValue
* the kind of value expected.
* @param referenceChain
* a reference chain to detect cyclic references.
* */
private void checkExpressionOperands(final CompilationTimeStamp timestamp, final Expected_Value_type expectedValue,
final IReferenceChain referenceChain) {
if (value == null) {
return;
}
value.setLoweridToReference(timestamp);
Type_type tempType = value.getExpressionReturntype(timestamp, expectedValue);
switch (tempType) {
case TYPE_HEXSTRING:
value.getValueRefdLast(timestamp, expectedValue, referenceChain);
return;
case TYPE_UNDEFINED:
setIsErroneous(true);
return;
default:
if (!isErroneous) {
location.reportSemanticError(OPERANDERROR);
setIsErroneous(true);
}
return;
}
}
@Override
public IValue evaluateValue(final CompilationTimeStamp timestamp, final Expected_Value_type expectedValue,
final IReferenceChain referenceChain) {
if (lastTimeChecked != null && !lastTimeChecked.isLess(timestamp)) {
return lastValue;
}
isErroneous = false;
lastTimeChecked = timestamp;
lastValue = this;
if (value == null) {
return lastValue;
}
checkExpressionOperands(timestamp, expectedValue, referenceChain);
if (getIsErroneous(timestamp)) {
return lastValue;
}
if (isUnfoldable(timestamp, referenceChain)) {
return lastValue;
}
IValue last = value.getValueRefdLast(timestamp, referenceChain);
if (last.getIsErroneous(timestamp)) {
setIsErroneous(true);
return lastValue;
}
switch (last.getValuetype()) {
case HEXSTRING_VALUE:
String temp = ((Hexstring_Value) last).getValue();
lastValue = new Octetstring_Value(hex2oct(temp));
lastValue.copyGeneralProperties(this);
break;
default:
setIsErroneous(true);
break;
}
return lastValue;
}
public static String hex2oct(final String hexString) {
if (hexString.length() % 2 == 0) {
return hexString;
}
return new StringBuilder(hexString.length() + 1).append('0').append(hexString).toString();
}
	/** Walks the operand while guarding against cyclic value references. */
	@Override
	public void checkRecursions(final CompilationTimeStamp timestamp, final IReferenceChain referenceChain) {
		if (referenceChain.add(this) && value != null) {
			referenceChain.markState();
			value.checkRecursions(timestamp, referenceChain);
			referenceChain.previousState();
		}
	}
	/** Incremental re-parse support: a damaged expression must be fully re-parsed. */
	@Override
	public void updateSyntax(final TTCN3ReparseUpdater reparser, final boolean isDamaged) throws ReParseException {
		if (isDamaged) {
			throw new ReParseException();
		}

		if (value != null) {
			value.updateSyntax(reparser, false);
			reparser.updateLocation(value.getLocation());
		}
	}
	/** Delegates reference search to the operand, if present. */
	@Override
	public void findReferences(final ReferenceFinder referenceFinder, final List<Hit> foundIdentifiers) {
		if (value == null) {
			return;
		}

		value.findReferences(referenceFinder, foundIdentifiers);
	}
@Override
protected boolean memberAccept(final ASTVisitor v) {
if (value != null && !value.accept(v)) {
return false;
}
return true;
}
}<|fim▁end|>
|
@Override
public String createStringRepresentation() {
|
<|file_name|>entity.js<|end_file_name|><|fim▁begin|>'use strict'
/* libraries */
var Sequelize = require('sequelize')
/* own code */
var Attribute = require('./attribute')
/**
* Parse DEM entity and create Sequelize definition for the table itself.
* @constructor
*/
function Entity() {
this._parserAttr = new Attribute()
}
/**
* Input JSON (DEM entity):
{
"id": "Person",
"alias": "person",
"comment": "Person basic entity with 2 attributes.",
"attributes": [...]
}
*
* Result data:
{
table: "NameFirst",<|fim▁hole|> options: {...}
}
*
* See http://sequelize.readthedocs.org/en/latest/docs/models-definition/#configuration
*
* @param jsDem
* @param seqModel
* @return {{}}
* @private
*/
Entity.prototype.parseJson = function _parseJson(jsDem, seqModel) {
var result = {table: '', columns: {}, options: {}};
/* process common properties */
result.table = jsDem.id
var options = result.options
/* parse columns */
if (jsDem.attributes) {
var demAttrs = jsDem.attributes
var columns = result.columns
var i, len, parsedAttr;
for (i = 0, len = demAttrs.length; i < len; ++i) {
parsedAttr = this._parserAttr.parseJson(demAttrs[i])
columns[parsedAttr.column] = parsedAttr.definition
}
}
return result
}
module.exports = Entity<|fim▁end|>
|
columns: {...},
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for news_project project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from django.core.mail import send_mail
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dtou6-c)r2@t$p2tudrq2gjy92wsfdkst2yng^5y-akom$$f13'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ACCOUNT_ACTIVATION_DAYS = 7
LOGIN_REDIRECT_URL = 'home'
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'news_project',
'articles',
'profiles',
'taggit'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'news_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'news_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# PostgreSQL backend; credentials come from the environment so they are not
# committed, with empty-string fallbacks for local tooling.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'newssitedb',
        'USER': os.environ.get('DB_USER', ''),
        'PASSWORD': os.environ.get("DB_PASSWORD", ''),
        'HOST': '127.0.0.1',
        'PORT': '5432',
        # Separate database name used when running the test suite.
        'TEST': {
            'NAME': 'IMAGER_TEST_DB'
        }
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
# Django's four default password validators (the CommonPasswordValidator
# entry was lost in a garbled merge and is restored here).
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# Email Settings
# EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_PORT = 587
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = os.environ.get('EM_PASS', '')
SERVER_EMAIL = '[email protected]'
DEFAULT_FROM_EMAIL = "News Project"<|fim▁end|>
|
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
|
class InvalidDuration(Exception):
    """Raised for an invalid duration value."""
    pass


class InvalidTag(Exception):
    """Raised for an invalid metadata tag."""
    pass


class InvalidID3TagVersion(Exception):
    """Raised for an unsupported ID3 tag version."""
    pass


class CouldntDecodeError(Exception):
    """Raised when audio data could not be decoded."""
    pass
|
class TooManyMissingFrames(Exception):
pass
|
<|file_name|>bdmplot2_callback.py<|end_file_name|><|fim▁begin|># Copyright (c) 2008-2010, Regents of the University of Colorado.
# This work was supported by NASA contracts NNJ05HE10G, NNC06CB40C, and
# NNC07CB47C.
# This library is free software. You can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, version 2.1 of the License.
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details. A copy of the GNU
# Lesser General Public License v 2.1 can be found in the file named
# "COPYING.LESSER". You should have received a copy of the GNU Lesser
# General Public License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA.
# You may contact the Automation Group at:
# [email protected]
# Dr. Kevin Gifford
# University of Colorado
# Engineering Center, ECAE 1B08
# Boulder, CO 80309
# Because BioNet was developed at a university, we ask that you provide
# attribution to the BioNet authors in any redistribution, modification,
# work, or article based on this library.
# You may contribute modifications or suggestions to the University of
# Colorado for the purpose of discussing and improving this software.
# Before your modifications are incorporated into the master version
# distributed by the University of Colorado, we must have a contributor
# license agreement on file from each contributor. If you wish to supply
# the University with your modifications, please join our mailing list.<|fim▁hole|>from bdm_client import *
from timespan import timeval_to_int
import time
# Active plot sessions keyed by session id. Each session dict is read by
# cb_datapoint and holds 'resource' (subscription patterns),
# 'bionet-resources' (matched resource names) and 'last requested'
# (epoch seconds of the last poll).
sessions = {}
# Per-resource user data: maps a resource name to a dict with 'datapoints'
# (all datapoints seen) and 'sessions' (datapoints grouped per session id).
bionet_resources = {}
# callbacks
def cb_lost_bdm(bdm, user_data):
    """Callback for a disappearing BDM peer; intentionally a no-op."""
    pass
def cb_new_bdm(bdm, user_data):
    """Callback for a newly discovered BDM peer; intentionally a no-op."""
    pass
def cb_lost_hab(hab, user_data):
    # A HAB went away: clear the cached user data on every resource of
    # every node it published, then log the loss.
    for i in range(0, bionet_hab_num_nodes(hab)):
        node = bionet_hab_get_node_by_index(hab, i)
        for j in range(0, bionet_node_get_num_resources(node)):
            resource = bionet_node_get_resource_by_index(node, j)
            pybionet_set_user_data(resource, None)
    print("lost hab: " + bionet_hab_get_type(hab) + "." + bionet_hab_get_id(hab))
def cb_new_hab(hab, user_data):
    # A new HAB appeared: just log it.
    print("new hab: " + bionet_hab_get_type(hab) + "." + bionet_hab_get_id(hab))
def cb_new_node(node, user_data):
    # A new node appeared: log it together with its resources (and their
    # last known values, if any) and its streams.
    hab = bionet_node_get_hab(node)
    print("new node: " + bionet_node_get_name(node))
    if (bionet_node_get_num_resources(node)):
        print("    Resources:")
        for i in range(bionet_node_get_num_resources(node)):
            resource = bionet_node_get_resource_by_index(node, i)
            # Index 0 is the most recent datapoint, when one exists.
            datapoint = bionet_resource_get_datapoint_by_index(resource, 0)
            if (datapoint == None):
                print("        " + bionet_resource_data_type_to_string(bionet_resource_get_data_type(resource)) + " " + bionet_resource_flavor_to_string(bionet_resource_get_flavor(resource)) + " " + bionet_resource_get_id(resource) + ": (no known value)")
            else:
                value_str = bionet_value_to_str(bionet_datapoint_get_value(datapoint));
                #%s %s %s = %s @ %s
                print("        " + bionet_resource_data_type_to_string(bionet_resource_get_data_type(resource)) + " " + bionet_resource_flavor_to_string(bionet_resource_get_flavor(resource)) + " " + bionet_resource_get_id(resource) + " = " + value_str + " @ " + bionet_datapoint_timestamp_to_string(datapoint))
    if (bionet_node_get_num_streams(node)):
        print("    Streams:")
        for i in range(bionet_node_get_num_streams(node)):
            stream = bionet_node_get_stream_by_index(node, i)
            print("        " + bionet_stream_get_id(stream) + " " + bionet_stream_get_type(stream) + " " + bionet_stream_direction_to_string(bionet_stream_get_direction(stream)))
def cb_lost_node(node, userdata):
    # A node went away: drop the cached user data on all of its resources,
    # then log the loss.
    hab = bionet_node_get_hab(node)
    for j in range(0, bionet_node_get_num_resources(node)):
        resource = bionet_node_get_resource_by_index(node, j)
        pybionet_set_user_data(resource, None)
    print("lost node: " + bionet_node_get_name(node))
def cb_datapoint(datapoint, userdata):
    # Called for every datapoint published on a subscribed resource.
    # Resolves the owning resource, fans the datapoint out to every session
    # whose subscription pattern matches, then expires stale sessions.
    value = bionet_datapoint_get_value(datapoint);
    resource = bionet_value_get_resource(value);
    node = bionet_resource_get_node(resource);
    hab = bionet_node_get_hab(node);
    value_str = bionet_value_to_str(value);
    #"%s.%s.%s:%s = %s %s %s @ %s"
    #print(bionet_resource_get_name(resource) + " = " + bionet_resource_data_type_to_string(bionet_resource_get_data_type(resource)) + " " + bionet_resource_flavor_to_string(bionet_resource_get_flavor(resource)) + " " + value_str + " @ " + bionet_datapoint_timestamp_to_string(datapoint))
    now = time.time()
    removal = []
    resource_name = bionet_resource_get_name(resource)
    # A datapoint record is a (timestamp-as-int, value-as-string) pair.
    dp = (timeval_to_int(bionet_datapoint_get_timestamp(datapoint)), value_str)
    for session_id, session in sessions.iteritems():
        found = False
        for r in session['resource']:
            if (bionet_resource_name_matches(resource_name, r)):
                # The session subscribes to this resource: record the
                # datapoint under every matching known resource name.
                for name in session['bionet-resources']:
                    if (name == resource_name):
                        u = bionet_resources[name]
                        if (None == u) or ('datapoints' not in u) or ('sessions' not in u): # no user data is set yet
                            u = { 'datapoints' : [ dp ], 'sessions' : { session_id : [ dp ] } }
                            bionet_resources[name] = u
                            print "Added datapoint to new user data"
                        else: # user data is set, just append to it
                            u['datapoints'].append(dp)
                            if session_id in u['sessions']:
                                u['sessions'][session_id].append(dp)
                            else:
                                u['sessions'][session_id] = [ dp ]
                            #print "Added datapoint to existing user data"
                        found = True
                if (False == found):
                    # First datapoint for this resource in this session:
                    # start tracking its name and create its user data.
                    session['bionet-resources'].append(resource_name)
                    u = { 'datapoints' : [ dp ], 'sessions' : { session_id : [ dp ] } }
                    bionet_resources[resource_name] = u
                    #print "Added datapoint to new user data of new resource"
        if (now > (session['last requested'] + 600)):
            # this session hasn't been requested in more than 10 minutes. remove it
            removal.append(session_id)
    for session_id in removal:
        #print "removed subscription ", sessions[session_id]['resource']
        del sessions[session_id]
    # TODO: unsubscribe when bdm_unsubscribe() is implemented
|
# Instructions can be found on our website at
# http://bioserve.colorado.edu/developers-corner.
|
<|file_name|>0004_auto_20171004_2323.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-04 21:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Makes the AS-set and max-prefix fields of AutonomousSystem optional
    (blank/null allowed). The first AlterField's closing parenthesis was
    lost in a garbled merge and is restored here."""

    dependencies = [("peering", "0003_auto_20170903_1235")]

    operations = [
        migrations.AlterField(
            model_name="autonomoussystem",
            name="ipv4_as_set",
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
        migrations.AlterField(
            model_name="autonomoussystem",
            name="ipv4_max_prefixes",
            field=models.PositiveIntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="autonomoussystem",
            name="ipv6_as_set",
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
        migrations.AlterField(
            model_name="autonomoussystem",
            name="ipv6_max_prefixes",
            field=models.PositiveIntegerField(blank=True, null=True),
        ),
    ]
),
|
<|file_name|>drmaa_wrapper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function
################################################################################
#
#
# drmaa_wrapper.py
#
# Copyright (C) 2013 Leo Goodstadt
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Portions of code from adapted from:
#
# http://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python
# Courtesy of J.F. Sebastian
# Use is licensed under the "Creative Commons Attribution Share Alike license"
# See http://stackexchange.com/legal
#
#################################################################################
"""
********************************************
:mod:`ruffus.cmdline` -- Overview
********************************************
.. moduleauthor:: Leo Goodstadt <[email protected]>
#
# Using drmaa
#
from ruffus import *
import drmaa_wrapper
"""
import sys, os
import stat
#
# tempfile for drmaa scripts
#
import tempfile
import datetime
import subprocess
import time
import sys
import subprocess
import threading
try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty # python 3.x
ON_POSIX = 'posix' in sys.builtin_module_names
if sys.hexversion >= 0x03000000:
# everything is unicode in python3
path_str_type = str
else:
path_str_type = basestring
#_________________________________________________________________________________________
# error_drmaa_job
#_________________________________________________________________________________________
class error_drmaa_job(Exception):
    """
    Raised for every failure reported by this module.
    """

    def __init__(self, *errmsg):
        super(error_drmaa_job, self).__init__(*errmsg)
#_________________________________________________________________________________________
# read_stdout_stderr_from_files
#_________________________________________________________________________________________
def read_stdout_stderr_from_files( stdout_path, stderr_path, logger = None, cmd_str = "", tries=5):
    """
    Reads the contents of the two specified paths and returns them as a
    (stdout_lines, stderr_lines) tuple of line lists.

    Thanks to paranoia approach contributed by Andreas Heger:
        Retry just in case file system hasn't committed.
        Logs a warning if files are missing: No big deal?
        Cleans up files afterwards.

    :param stdout_path: path of the captured stdout file
    :param stderr_path: path of the captured stderr file
    :param logger: optional logger used for warnings
    :param cmd_str: original command string, only used in warning messages
    :param tries: number of 2-second waits for the files to appear
    """
    #
    #   delay up to 2 * tries seconds until both files are ready
    #
    for xxx in range(tries):
        if os.path.exists( stdout_path ) and os.path.exists( stderr_path ):
            break
        time.sleep(2)

    def _read_lines(path, stream_name):
        # BUG FIX: the original used open(path).readlines() and left the file
        # handle to be closed by garbage collection; use a context manager so
        # the handle is released immediately
        try:
            with open( path, "r" ) as ff:
                return ff.readlines()
        except IOError:
            exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
            msg = str(exceptionValue)
            if logger:
                logger.warning( "could not open %s: %s for \n%s" % (stream_name, msg, cmd_str))
            return []

    stdout = _read_lines( stdout_path, "stdout" )
    stderr = _read_lines( stderr_path, "stderr" )

    #
    #   cleanup ignoring errors
    #
    try:
        os.unlink( stdout_path )
        os.unlink( stderr_path )
    except OSError:
        pass

    return stdout, stderr
#_________________________________________________________________________________________
# setup_drmaa_job
#_________________________________________________________________________________________
def setup_drmaa_job( drmaa_session, job_name, job_environment, working_directory, job_other_options):
    """
    Create and fill in a drmaa job template:
    working directory, environment, job name and native (scheduler
    specific) options.
    """
    job_template = drmaa_session.createJobTemplate()

    # default to the current directory unless a working directory was given
    job_template.workingDirectory = working_directory if working_directory else os.getcwd()

    if job_environment:
        # dictionary e.g. { 'BASH_ENV' : '~/.bashrc' }
        job_template.jobEnvironment = job_environment

    job_template.args = []

    if job_name:
        job_template.jobName = job_name
    else:
        # nameless jobs sometimes break drmaa implementations:
        # fall back to a time-stamped name
        timestamp_fields = datetime.datetime.now().timetuple()[0:6]
        job_template.jobName = "ruffus_job_" + "_".join(map(str, timestamp_fields))

    #
    #   optional job parameters
    #
    job_template.nativeSpecification = job_other_options

    # keep stdout and stderr separate
    job_template.joinFiles = False

    return job_template
#_________________________________________________________________________________________
# write_job_script_to_temp_file
#_________________________________________________________________________________________
def write_job_script_to_temp_file( cmd_str, job_script_directory, job_name, job_other_options, job_environment, working_directory):
    '''
    Writes cmd_str into a self-contained "#!/bin/sh" script inside
    job_script_directory and
    returns (job_script_path, stdout_path, stderr_path)

    The job parameters are recorded as comments at the top of the script
    (as suggested by Bernie Pope). The script is made readable/executable
    by user and group so the scheduler can run it.
    '''
    time_stmp_str = "_".join(map(str, datetime.datetime.now().timetuple()[0:6]))

    # create script directory if necessary
    # Catch the failure rather than test for existence to avoid race
    # conditions. BUG FIX: the original bare "except:" also hid unrelated
    # errors (e.g. KeyboardInterrupt); OSError is all we want to ignore here
    try:
        os.makedirs(job_script_directory)
    except OSError:
        pass

    tmpfile = tempfile.NamedTemporaryFile(mode='w', prefix='drmaa_script_' + time_stmp_str + "__", dir = job_script_directory, delete = False)

    #
    #   hopefully #!/bin/sh is universally portable among unix-like operating systems
    #
    tmpfile.write( "#!/bin/sh\n" )

    #
    #   log parameters as suggested by Bernie Pope
    #
    for title, parameter in ( ("job_name",          job_name),
                              ("job_other_options", job_other_options),
                              ("job_environment",   job_environment),
                              ("working_directory", working_directory)):
        if parameter:
            tmpfile.write( "#%s=%s\n" % (title, parameter))

    tmpfile.write( cmd_str + "\n" )
    tmpfile.close()

    job_script_path = os.path.abspath( tmpfile.name )
    stdout_path = job_script_path + ".stdout"
    stderr_path = job_script_path + ".stderr"

    os.chmod( job_script_path, stat.S_IRWXG | stat.S_IRWXU )

    return (job_script_path, stdout_path, stderr_path)
#_________________________________________________________________________________________
# run_job_using_drmaa
#_________________________________________________________________________________________
def run_job_using_drmaa (cmd_str, job_name = None, job_other_options = "", job_script_directory = None, job_environment = None, working_directory = None, retain_job_scripts = False, logger = None, drmaa_session = None, verbose = 0):
    """
    Runs the specified command remotely using drmaa with the supplied
    session (drmaa_session is required).

    Returns (stdout_lines, stderr_lines) of the completed job.
    Raises error_drmaa_job if the job was aborted, killed by a signal or
    exited with a non-zero status.
    """
    import drmaa

    #
    #   a drmaa session must be supplied
    #
    if drmaa_session is None:
        raise error_drmaa_job( "Please specify a drmaa_session in run_job()")

    #
    #   make job template
    #
    job_template = setup_drmaa_job( drmaa_session, job_name, job_environment, working_directory, job_other_options)

    #
    #   make job script
    #
    if not job_script_directory:
        job_script_directory = os.getcwd()
    job_script_path, stdout_path, stderr_path = write_job_script_to_temp_file( cmd_str, job_script_directory, job_name, job_other_options, job_environment, working_directory)
    job_template.remoteCommand = job_script_path
    # drmaa paths specified as [hostname]:file_path.
    # See http://www.ogf.org/Public_Comment_Docs/Documents/2007-12/ggf-drmaa-idl-binding-v1%2000%20RC7.pdf
    job_template.outputPath = ":" + stdout_path
    job_template.errorPath = ":" + stderr_path

    #
    #   Run job and wait
    #
    jobid = drmaa_session.runJob(job_template)
    if logger:
        logger.debug( "job has been submitted with jobid %s" % str(jobid ))

    try:
        job_info = drmaa_session.wait(jobid, drmaa.Session.TIMEOUT_WAIT_FOREVER)
    except Exception:
        exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
        msg = str(exceptionValue)
        # ignore message 24 in PBS
        # code 24: drmaa: Job finished but resource usage information and/or termination status could not be provided.":
        if not msg.startswith("code 24"): raise
        if logger:
            # BUG FIX: the original omitted the "%" formatting operator
            # (calling the string with the tuple -> TypeError), referenced
            # the non-existent msg.message, and left "jobid=jobid" literal
            logger.info("Warning %s\n"
                        "The original command was:\n%s\njobid=%s\n"
                        % (msg, cmd_str, jobid) )
        job_info = None

    #
    #   Read output
    #
    stdout, stderr = read_stdout_stderr_from_files( stdout_path, stderr_path, logger, cmd_str)

    job_info_str = ("The original command was: >> %s <<\n"
                    "The jobid was: %s\n"
                    "The job script name was: %s\n" %
                    (cmd_str,
                     jobid,
                     job_script_path))

    def stderr_stdout_to_str (stderr, stdout):
        """
        Concatenate stdout and stderr to string
        """
        result = ""
        if stderr:
            result += "The stderr was: \n%s\n\n" % ("".join( stderr))
        if stdout:
            result += "The stdout was: \n%s\n\n" % ("".join( stdout))
        return result

    #
    #   Throw if failed
    #
    if job_info:
        job_info_str += "Resources used: %s " % (job_info.resourceUsage)
        if job_info.wasAborted:
            raise error_drmaa_job( "The drmaa command was never ran but used %s:\n%s"
                                   % (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
        elif job_info.hasSignal:
            raise error_drmaa_job( "The drmaa command was terminated by signal %i:\n%s"
                                   % (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
        elif job_info.hasExited:
            if job_info.exitStatus:
                # BUG FIX: this branch reports a non-zero *exit status*, not
                # a signal; the original reused the signal wording
                raise error_drmaa_job( "The drmaa command exited with non-zero status %i:\n%s"
                                       % (job_info.exitStatus, job_info_str + stderr_stdout_to_str (stderr, stdout)))
            #
            #   Decorate normal exit with some resource usage information
            #
            elif verbose:
                def nice_mem_str(num):
                    """
                    Format memory sizes
                    http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size
                    """
                    num = float(num)
                    for x in ['bytes','KB','MB','GB']:
                        if num < 1024.0:
                            return "%3.1f%s" % (num, x)
                        num /= 1024.0
                    return "%3.1f%s" % (num, 'TB')

                try:
                    resource_usage_str = []
                    if 'maxvmem' in job_info.resourceUsage:
                        if 'mem' in job_info.resourceUsage:
                            resource_usage_str.append("Mem=%s(%s)" % (nice_mem_str(job_info.resourceUsage['maxvmem']), job_info.resourceUsage['mem']))
                        else:
                            resource_usage_str.append("Mem=%s" % nice_mem_str(job_info.resourceUsage['maxvmem']))
                    if 'ru_wallclock' in job_info.resourceUsage:
                        resource_usage_str.append("CPU wallclock= %.2gs" % float(job_info.resourceUsage['ru_wallclock']))
                    if len(resource_usage_str):
                        logger.info("Drmaa command used %s in running %s" % (", ".join(resource_usage_str), cmd_str))
                    else:
                        logger.info("Drmaa command successfully ran %s" % cmd_str)
                except:
                    # resource usage entries can be missing or malformed:
                    # fall back to dumping the raw dictionary
                    logger.info("Drmaa command used %s in running %s" % (job_info.resourceUsage, cmd_str))

    #
    #   clean up job template
    #
    drmaa_session.deleteJobTemplate(job_template)

    #
    #   Cleanup job script unless retain_job_scripts is set
    #
    if retain_job_scripts:
        # job scripts have the jobid as an extension
        os.rename(job_script_path, job_script_path + ".%s" % jobid )
    else:
        try:
            os.unlink( job_script_path )
        except OSError:
            if logger:
                logger.warning( "Temporary job script wrapper '%s' missing (and ignored) at clean-up" % job_script_path )

    return stdout, stderr
def enqueue_output(out, queue, echo):
    """
    Reader-thread helper: push every line read from `out` onto `queue`,
    optionally mirroring each line to the `echo` stream, then close `out`.
    """
    for each_line in iter(out.readline, ''):
        queue.put(each_line)
        if echo is not None:
            echo.write(each_line)
            echo.flush()
    out.close()
#_________________________________________________________________________________________
# run_job_locally
#_________________________________________________________________________________________
def run_job_locally (cmd_str, logger = None, job_environment = None, working_directory = None, local_echo = False):
    """
    Runs the specified command on the local machine (through a shell)
    instead of submitting it via drmaa.

    stdout / stderr are drained on background threads (so neither pipe can
    fill up and block the child process) and returned as lists of lines.
    Raises error_drmaa_job when the command finishes with a non-zero
    return code.
    """
    popen_params = {"args"               : cmd_str,
                    "cwd"                : working_directory if working_directory is not None else os.getcwd(),
                    "shell"              : True,
                    "stdin"              : subprocess.PIPE,
                    "stdout"             : subprocess.PIPE,
                    "stderr"             : subprocess.PIPE,
                    "bufsize"            : 1,
                    "universal_newlines" : True,
                    "close_fds"          : ON_POSIX}
    if job_environment is not None:
        # e.g. { 'BASH_ENV' : '~/.bashrc' }
        popen_params["env"] = job_environment

    process = subprocess.Popen( **popen_params )

    stderr_queue = Queue()
    stdout_queue = Queue()
    stdout_reader = threading.Thread(target = enqueue_output,
                                     args = (process.stdout, stdout_queue, sys.stdout if local_echo else None))
    stderr_reader = threading.Thread(target = enqueue_output,
                                     args = (process.stderr, stderr_queue, sys.stderr if local_echo else None))
    # daemon threads: otherwise the sub process cannot be interrupted by Ctrl-C
    stdout_reader.daemon = True
    stderr_reader.daemon = True
    stdout_reader.start()
    stderr_reader.start()

    process.wait()
    stdout_reader.join()
    stderr_reader.join()

    process.stdin.close()
    process.stdout.close()
    process.stderr.close()

    # drain whatever the reader threads collected
    stdout, stderr = [], []
    try:
        while True:
            stdout.append(stdout_queue.get(False))
    except:
        pass
    try:
        while True:
            stderr.append(stderr_queue.get(False))
    except:
        pass

    if process.returncode != 0:
        raise error_drmaa_job( "The locally run command was terminated by signal %i:\n"
                               "The original command was:\n%s\n"
                               "The stderr was: \n%s\n\n"
                               "The stdout was: \n%s\n\n" %
                               (-process.returncode, cmd_str, "".join(stderr), "".join(stdout)) )

    return stdout, stderr
#_________________________________________________________________________________________
# touch_output_files
#_________________________________________________________________________________________
def touch_output_files (cmd_str, output_files, logger = None):
    """
    Touches output files instead of actually running the command string:
    missing files are created empty, existing files get a fresh mtime.
    Nested lists/tuples of file names are flattened; non-string entries
    are ignored.
    """
    if not output_files or not len(output_files):
        if logger:
            # BUG FIX: the original left the "%s" placeholder unfilled, so the
            # log line literally contained "%s" instead of the command
            logger.debug("No output files to 'touch' for command:\n%s" % cmd_str)
        return

    # make sure is list
    ltypes = (list, tuple)
    if not isinstance(output_files, ltypes):
        output_files = [output_files]
    else:
        output_files = list(output_files)

    #
    #   flatten list of file names
    #   from http://rightfootin.blogspot.co.uk/2006/09/more-on-python-flatten.html
    #
    i = 0
    while i < len(output_files):
        while isinstance(output_files[i], ltypes):
            if not output_files[i]:
                output_files.pop(i)
                i -= 1
                break
            else:
                output_files[i:i + 1] = output_files[i]
        i += 1

    for f in output_files:
        # ignore non strings
        if not isinstance (f, path_str_type):
            continue

        if not os.path.exists(f):
            # create (and immediately close) an empty file
            with open(f, 'w'):
                pass
        else:
            # touch existing file
            os.utime(f, None)
#_________________________________________________________________________________________
# run_job
#_________________________________________________________________________________________
def run_job(cmd_str, job_name = None, job_other_options = None, job_script_directory = None,
            job_environment = None, working_directory = None, logger = None,
            drmaa_session = None, retain_job_scripts = False,
            run_locally = False, output_files = None, touch_only = False, verbose = 0, local_echo = False):
    """
    Dispatches the command either to drmaa, to a local subprocess, or --
    in simulation mode -- merely touches the expected output files.
    """
    # simulation: nothing is run, the outputs are just touched
    if touch_only:
        touch_output_files (cmd_str, output_files, logger)
        return "", ""
    # run on this machine
    elif run_locally:
        return run_job_locally (cmd_str, logger, job_environment, working_directory, local_echo)
    # hand over to the cluster via drmaa
    else:
        return run_job_using_drmaa (cmd_str, job_name, job_other_options, job_script_directory, job_environment, working_directory, retain_job_scripts, logger, drmaa_session, verbose)
| |
<|file_name|>OverLoadMe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ..internal.misc import json
from ..internal.MultiAccount import MultiAccount
class OverLoadMe(MultiAccount):
    # pyLoad multi-hoster account plugin metadata (read by the plugin loader)
    __name__ = "OverLoadMe"
    __type__ = "account"
    __version__ = "0.13"
    __status__ = "testing"

    __config__ = [("mh_mode", "all;listed;unlisted", "Filter hosters to use", "all"),
                  ("mh_list", "str", "Hoster list (comma separated)", ""),
                  ("mh_interval", "int", "Reload interval in hours", 12)]

    __description__ = """Over-Load.me account plugin"""
    __license__ = "GPLv3"
    __authors__ = [("marley", "[email protected]")]

    def grab_hosters(self, user, password, data):
        # Fetch the supported hoster list from the Over-Load API and return it
        # as a list of non-empty, whitespace-stripped names.
        # NOTE(review): the 'auth' value is a token hard-coded into the plugin
        # -- presumably a shared public API key; confirm it is not a secret.
        html = self.load("https://api.over-load.me/hoster.php",
                         get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"})
        return [x for x in map(
            str.strip, html.replace("\"", "").split(",")) if x]

    def grab_info(self, user, password, data):
        # Query account status and return the dict expected by MultiAccount:
        # 'premium', 'validuntil' (unix timestamp) and 'trafficleft'
        # (-1 meaning unlimited).
        html = self.load("https://api.over-load.me/account.php",
                         get={'user': user,
                              'auth': password}).strip()
        data = json.loads(html)
        self.log_debug(data)

        #: Check for premium
        if data['membership'] == "Free":
            return {'premium': False, 'validuntil': None, 'trafficleft': None}
        else:
            return {'premium': True,
                    'validuntil': data['expirationunix'],
                    'trafficleft': -1}

    def signin(self, user, password, data):
        # Validate the credentials; the API reports a failure via 'err' == 1.
        html = self.load("https://api.over-load.me/account.php",
                         get={'user': user,
                              'auth': password}).strip()
        data = json.loads(html)
        if data['err'] == 1:
            self.fail_login()
| |
<|file_name|>RapidMinerTestCase.java<|end_file_name|><|fim▁begin|>/*
* RapidMiner
*
* Copyright (C) 2001-2011 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.test;
import com.rapidminer.tools.LogService;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
/**
* Extends the JUnit test case by a method for asserting equality of doubles
* with respect to Double.NaN
*
* @author Simon Fischer
<|fim▁hole|> */
public class RapidMinerTestCase extends TestCase {
public RapidMinerTestCase() {
super();
}
public RapidMinerTestCase(String name) {
super(name);
}
@Override
public void setUp() throws Exception {
super.setUp();
LogService.getGlobal().setVerbosityLevel(LogService.WARNING);
}
public void assertEqualsNaN(String message, double expected, double actual) {
if (Double.isNaN(expected)) {
if (!Double.isNaN(actual)) {
throw new AssertionFailedError(message + " expected: <" + expected + "> but was: <" + actual + ">");
}
} else {
assertEquals(message, expected, actual, 0.000000001);
}
}
}<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|> name = 'iMX233_GPIO',
# Build/install script for the iMX233_GPIO C extension.
# NOTE: the source was scrambled (the import and the setup( opening appeared
# after the argument list); reconstructed as a valid script.
from distutils.core import setup, Extension

setup(
    name='iMX233_GPIO',
    version='0.1.0',
    author='Stefan Mavrodiev',
    author_email='[email protected]',
    url='https://www.olimex.com/',
    license='MIT',
    description='Control GPIOs on iMX233-OLinuXino.',
    # long description is assembled from the shipped README and changelog
    long_description=open('README.txt').read() + open('CHANGES.txt').read(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Home Automation',
        'Topic :: Software Development :: Embedded Systems',
    ],
    ext_modules=[Extension('iMX233_GPIO', ['source/imx233.c'])],
    package_dir={'': 'source'},
    packages=[''],
)
|
<|file_name|>RemoteSpecParser.py<|end_file_name|><|fim▁begin|># ####################################################################
# gofed - set of tools to automize packaging of golang devel codes
# Copyright (C) 2014 Jan Chaloupka, [email protected]<|fim▁hole|>#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
###################################################################
# TODO:
# [ ] - detect more import paths/sources in spec file?
# [ ] - detect from %files every build, analyze its content (downloading it from koji by detecting its name
# from spec file => no koji latest-builds, which packages/builds are no arch, which are arch specific (el6 beast)
# [ ] - all provides of source code import must in a form golang(import_path/...)
# [ ] - what files/provides are optional, which should not be in provides (test files, example, ...)
# [ ] - golang imports of examples are optional
###################################################################
import tempfile
from Utils import runCommand
from SpecParser import SpecParser
from Base import Base
class RemoteSpecParser(Base):
    """Downloads a package's spec file from Fedora dist-git and wraps SpecParser."""

    def __init__(self, branch, package):
        Base.__init__(self)
        self.branch = branch
        self.package = package
        self.sp_obj = None

    def parse(self):
        # Download the spec file into a throw-away temporary file and hand it
        # to SpecParser; on failure remember the parser's error in self.err.
        tmp_spec = tempfile.NamedTemporaryFile(delete=True)
        download_cmd = "curl http://pkgs.fedoraproject.org/cgit/rpms/%s.git/plain/%s.spec > %s"
        runCommand(download_cmd % (self.package, self.package, tmp_spec.name))

        self.sp_obj = SpecParser(tmp_spec.name)
        if self.sp_obj.parse():
            tmp_spec.close()
            return True
        self.err = self.sp_obj.getError()
        tmp_spec.close()
        return False

    def getProvides(self):
        """Provides from all [sub]packages of the fetched spec file."""
        return {} if self.sp_obj is None else self.sp_obj.getProvides()

    def getPackageCommits(self):
        """Value of the %commit macro, or "" before a successful parse()."""
        return "" if self.sp_obj is None else self.sp_obj.getMacro("commit")

    def getPkgURL(self):
        """Value of the URL tag, or "" before a successful parse()."""
        return "" if self.sp_obj is None else self.sp_obj.getTag("url")
| |
<|file_name|>api_v1.py<|end_file_name|><|fim▁begin|>from flask import Blueprint
import flask_restx
from flask_restx import Resource
from flask import request
# import subprocess
# from os import path
# from flask import redirect
from sanskrit_parser.base.sanskrit_base import SanskritObject, SLP1
from sanskrit_parser.parser.sandhi_analyzer import LexicalSandhiAnalyzer
from sanskrit_parser import __version__
from sanskrit_parser import Parser
URL_PREFIX = '/v1'
api_blueprint = Blueprint(
'sanskrit_parser', __name__,
template_folder='templates'
)
api = flask_restx.Api(app=api_blueprint, version='1.0', title='sanskrit_parser API',
description='For detailed intro and to report issues: see <a href="https://github.com/kmadathil/sanskrit_parser">here</a>. '
'A list of REST and non-REST API routes avalilable on this server: <a href="../sitemap">sitemap</a>.',
default_label=api_blueprint.name,
prefix=URL_PREFIX, doc='/docs')
analyzer = LexicalSandhiAnalyzer()
def jedge(pred, node, label):
    """ Serialize a parse edge as (pada, tags, edge label, predecessor pada) """
    devanagari_label = SanskritObject(label, encoding=SLP1).devanagari(strict_io=False)
    return (node.pada.devanagari(strict_io=False),
            jtag(node.getMorphologicalTags()),
            devanagari_label,
            pred.pada.devanagari(strict_io=False))
def jnode(node):
    """ Helper to translate a parse node into a serializable tuple """
    pada_str = node.pada.devanagari(strict_io=False)
    tag_info = jtag(node.getMorphologicalTags())
    return (pada_str, tag_info, "", "")
def jtag(tag):
    """ Helper to translate a (root, tagset) morphological tag into strings """
    root, tagset = tag[0], tag[1]
    members = [member.devanagari(strict_io=False) for member in list(tagset)]
    return (root.devanagari(strict_io=False), members)
def jtags(tags):
    """ Helper to translate a sequence of morphological tags """
    return list(map(jtag, tags))
@api.route('/version/')
class Version(Resource):
def get(self):
"""Library Version"""<|fim▁hole|>@api.route('/tags/<string:p>')
class Tags(Resource):
    def get(self, p):
        """ Get lexical (morphological) tags for the word p """
        word = SanskritObject(p, strict_io=False)
        tags = analyzer.getMorphologicalTags(word)
        serializable_tags = jtags(tags) if tags is not None else []
        return {"input": p, "devanagari": word.devanagari(), "tags": serializable_tags}
@api.route('/splits/<string:v>')
class Splits(Resource):
    def get(self, v):
        """ Get up to ten sandhi splits for the phrase v """
        # ?strict=false switches off strict SLP1 input handling
        strict_io = request.args.get("strict") != "false"
        phrase = SanskritObject(v, strict_io=strict_io, replace_ending_visarga=None)
        graph = analyzer.getSandhiSplits(phrase)
        if graph:
            jsplits = [[piece.devanagari(strict_io=False) for piece in split]
                       for split in graph.find_all_paths(10)]
        else:
            jsplits = []
        return {"input": v, "devanagari": phrase.devanagari(), "splits": jsplits}
@api.route('/parse-presegmented/<string:v>')
class Parse_Presegmented(Resource):
    def get(self, v):
        """ Parse a presegmented sentence

        Returns the serialized parses plus graphviz dot sources for the
        split and for each parse.
        """
        strict_p = True
        if request.args.get("strict") == "false":
            strict_p = False
        vobj = SanskritObject(v, strict_io=strict_p, replace_ending_visarga=None)
        parser = Parser(input_encoding="SLP1",
                        output_encoding="Devanagari",
                        replace_ending_visarga='s')
        # BUG FIX: sdot/pdots were only assigned inside the loop, so an input
        # producing no split raised NameError when building the response;
        # a leftover debug print(v) was also removed
        mres = []
        pdots = []
        sdot = ""
        for split in parser.split(vobj.canonical(), limit=10, pre_segmented=True):
            parses = list(split.parse(limit=10))
            sdot = split.to_dot()
            mres = [x.serializable() for x in parses]
            pdots = [x.to_dot() for x in parses]
        r = {"input": v, "devanagari": vobj.devanagari(), "analysis": mres,
             "split_dot": sdot,
             "parse_dots": pdots}
        return r
@api.route('/presegmented/<string:v>')
class Presegmented(Resource):
    def get(self, v):
        """ Presegmented Split (strict SLP1 input) """
        vobj = SanskritObject(v, strict_io=True, replace_ending_visarga=None)
        parser = Parser(input_encoding="SLP1",
                        output_encoding="Devanagari",
                        replace_ending_visarga='s')
        splits = parser.split(vobj.canonical(), limit=10, pre_segmented=True)
        serialized = [s.serializable()['split'] for s in splits]
        return {"input": v, "devanagari": vobj.devanagari(), "splits": serialized}
|
r = {"version": str(__version__)}
return r
|
<|file_name|>iso_metadata_parser.py<|end_file_name|><|fim▁begin|>""" A module to contain utility ISO-19115 metadata parsing helpers """
from _collections import OrderedDict
from copy import deepcopy
from frozendict import frozendict as FrozenOrderedDict
from parserutils.collections import filter_empty, reduce_value, wrap_value
from parserutils.elements import get_element_name, get_element_text, get_elements_text
from parserutils.elements import get_elements, get_remote_element, insert_element, remove_element
from parserutils.elements import XPATH_DELIM
from gis_metadata.exceptions import InvalidContent
from gis_metadata.metadata_parser import MetadataParser
from gis_metadata.utils import DATE_TYPE, DATE_TYPE_SINGLE, DATE_TYPE_MULTIPLE
from gis_metadata.utils import DATE_TYPE_RANGE, DATE_TYPE_RANGE_BEGIN, DATE_TYPE_RANGE_END
from gis_metadata.utils import ATTRIBUTES
from gis_metadata.utils import CONTACTS
from gis_metadata.utils import BOUNDING_BOX
from gis_metadata.utils import DATES
from gis_metadata.utils import DIGITAL_FORMS
from gis_metadata.utils import KEYWORDS_PLACE, KEYWORDS_STRATUM, KEYWORDS_TEMPORAL, KEYWORDS_THEME
from gis_metadata.utils import LARGER_WORKS
from gis_metadata.utils import PROCESS_STEPS
from gis_metadata.utils import RASTER_DIMS, RASTER_INFO
from gis_metadata.utils import COMPLEX_DEFINITIONS, ParserProperty
from gis_metadata.utils import format_xpaths, get_default_for_complex, get_default_for_complex_sub
from gis_metadata.utils import parse_complex_list, parse_property, update_complex_list, update_property
ISO_ROOTS = ('MD_Metadata', 'MI_Metadata')
KEYWORD_PROPS = (KEYWORDS_PLACE, KEYWORDS_STRATUM, KEYWORDS_TEMPORAL, KEYWORDS_THEME)
KEYWORD_TYPES = FrozenOrderedDict({
KEYWORDS_PLACE: 'place',
KEYWORDS_STRATUM: 'stratum',
KEYWORDS_TEMPORAL: 'temporal',
KEYWORDS_THEME: 'theme'
})
# For appending digital form content to ISO distribution format specs
ISO_DIGITAL_FORMS_DELIM = '@------------------------------@'
# Define backup locations for attribute sub-properties and dimension type property
ISO_DEFINITIONS = dict({k: dict(v) for k, v in dict(COMPLEX_DEFINITIONS).items()})
ISO_DEFINITIONS[ATTRIBUTES].update({
'_definition_source': '{_definition_src}',
'__definition_source': '{__definition_src}',
'___definition_source': '{___definition_src}'
})
ISO_DEFINITIONS[RASTER_DIMS]['_type'] = '{_type}'
ISO_DEFINITIONS = FrozenOrderedDict({k: FrozenOrderedDict(v) for k, v in ISO_DEFINITIONS.items()})
ISO_TAG_ROOTS = OrderedDict((
# First process private dependency tags (order enforced by key sorting)
('_content_coverage', 'contentInfo/MD_CoverageDescription'),
('_dataqual', 'dataQualityInfo/DQ_DataQuality'),
('_dataqual_lineage', '{_dataqual}/lineage/LI_Lineage'),
('_dataqual_report', '{_dataqual}/report'),
('_distinfo', 'distributionInfo/MD_Distribution'),
('_distinfo_dist', '{_distinfo}/distributor/MD_Distributor'),
('_distinfo_proc', '{_distinfo_dist}/distributionOrderProcess/MD_StandardOrderProcess'),
('_distinfo_resp', '{_distinfo_dist}/distributorContact/CI_ResponsibleParty'),
('_distinfo_resp_contact', '{_distinfo_resp}/contactInfo/CI_Contact'),
('_distinfo_rsrc', '{_distinfo}/transferOptions/MD_DigitalTransferOptions/onLine/CI_OnlineResource'),
('_idinfo', 'identificationInfo/MD_DataIdentification'),
('_idinfo_aggregate', '{_idinfo}/aggregationInfo/MD_AggregateInformation'),
('_idinfo_aggregate_citation', '{_idinfo_aggregate}/aggregateDataSetName/CI_Citation'),
('_idinfo_aggregate_contact', '{_idinfo_aggregate_citation}/citedResponsibleParty/CI_ResponsibleParty'),
('_idinfo_citation', '{_idinfo}/citation/CI_Citation'),
('_idinfo_citresp', '{_idinfo_citation}/citedResponsibleParty/CI_ResponsibleParty'),
('_idinfo_extent', '{_idinfo}/extent/EX_Extent'),
('_idinfo_keywords', '{_idinfo}/descriptiveKeywords/MD_Keywords'),
('_idinfo_resp', '{_idinfo}/pointOfContact/CI_ResponsibleParty'),
('_idinfo_resp_contact', '{_idinfo_resp}/contactInfo/CI_Contact'),
('_srinfo_grid_rep', 'spatialRepresentationInfo/MD_GridSpatialRepresentation'),
('_srinfo_grid_dim', '{_srinfo_grid_rep}/axisDimensionProperties/MD_Dimension'),
# Supported in separate file ISO-19110: FC_FeatureCatalog
('_attr_root', 'FC_FeatureCatalogue'),
('_attr_base', 'featureType/FC_FeatureType/carrierOfCharacteristics/FC_FeatureAttribute'),
('_attr_def', '{_attr_base}/definitionReference/FC_DefinitionReference/definitionSource/FC_DefinitionSource'),
('_attr_src', '{_attr_def}/source/CI_Citation/citedResponsibleParty/CI_ResponsibleParty'),
# References to separate file ISO-19110 from: MD_Metadata
('_attr_citation', 'contentInfo/MD_FeatureCatalogueDescription/featureCatalogueCitation'),
('_attr_contact', '{_attr_citation}/CI_Citation/citedResponsibleParty/CI_ResponsibleParty/contactInfo/CI_Contact'),
('_attr_contact_url', '{_attr_contact}/onlineResource/CI_OnlineResource/linkage/URL')
))
# Two passes required because of self references within roots dict
ISO_TAG_ROOTS.update(format_xpaths(ISO_TAG_ROOTS, **ISO_TAG_ROOTS))
ISO_TAG_ROOTS.update(format_xpaths(ISO_TAG_ROOTS, **ISO_TAG_ROOTS))
ISO_TAG_ROOTS = FrozenOrderedDict(ISO_TAG_ROOTS)
ISO_TAG_FORMATS = {
# Property-specific xpath roots: the base from which each element repeats
'_attribute_accuracy_root': '{_dataqual_report}',
'_attributes_root': 'featureType/FC_FeatureType/carrierOfCharacteristics',
'_bounding_box_root': '{_idinfo_extent}/geographicElement',
'_contacts_root': '{_idinfo}/pointOfContact',
'_dataset_completeness_root': '{_dataqual_report}',
'_dates_root': '{_idinfo_extent}/temporalElement',
'_digital_forms_root': '{_distinfo}/distributionFormat',
'_dist_liability_root': '{_idinfo}/resourceConstraints',
'_transfer_options_root': '{_distinfo}/transferOptions/MD_DigitalTransferOptions/onLine',
'_keywords_root': '{_idinfo}/descriptiveKeywords',
'_larger_works_root': '{_idinfo_aggregate_citation}',
'_process_steps_root': '{_dataqual_lineage}/processStep',
'_raster_info_root': '{_srinfo_grid_rep}/axisDimensionProperties',
'_use_constraints_root': '{_idinfo}/resourceConstraints',
# Then process public dependent tags
'title': '{_idinfo_citation}/title/CharacterString',
'abstract': '{_idinfo}/abstract/CharacterString',
'purpose': '{_idinfo}/purpose/CharacterString',
'supplementary_info': '{_idinfo}/supplementalInformation/CharacterString',
'online_linkages': '{_idinfo_citresp}/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/linkage/URL',
'originators': '{_idinfo_citresp}/organisationName/CharacterString',
'publish_date': '{_idinfo_citation}/date/CI_Date/date/Date',
'publish_date_type': '{_idinfo_citation}/date/CI_Date/dateType/CI_DateTypeCode',
'data_credits': '{_idinfo}/credit/CharacterString',
CONTACTS: '{_idinfo_resp}/{{ct_path}}',
'dist_contact_org': '{_distinfo_resp}/organisationName/CharacterString',
'dist_contact_person': '{_distinfo_resp}/individualName/CharacterString',
'dist_address_type': '{_distinfo_resp_contact}/address/@type',
'dist_address': '{_distinfo_resp_contact}/address/CI_Address/deliveryPoint/CharacterString',
'dist_city': '{_distinfo_resp_contact}/address/CI_Address/city/CharacterString',
'dist_state': '{_distinfo_resp_contact}/address/CI_Address/administrativeArea/CharacterString',
'dist_postal': '{_distinfo_resp_contact}/address/CI_Address/postalCode/CharacterString',
'dist_country': '{_distinfo_resp_contact}/address/CI_Address/country/CharacterString',
'_dist_country': '{_distinfo_resp_contact}/address/CI_Address/country/Country', # If not in CharacterString
'dist_phone': '{_distinfo_resp_contact}/phone/CI_Telephone/voice/CharacterString',
'dist_email': '{_distinfo_resp_contact}/address/CI_Address/electronicMailAddress/CharacterString',
'dist_liability': '{_idinfo}/resourceConstraints/MD_LegalConstraints/otherConstraints/CharacterString',
'processing_fees': '{_distinfo_proc}/fees/CharacterString',
'processing_instrs': '{_distinfo_proc}/orderingInstructions/CharacterString',
'resource_desc': '{_idinfo}/resourceSpecificUsage/MD_Usage/specificUsage/CharacterString',
'tech_prerequisites': '{_idinfo}/environmentDescription/CharacterString',
ATTRIBUTES: '{_attr_base}/{{ad_path}}',
'_attributes_file': '{_attr_citation}/@href',
'__attributes_file': '{_attr_contact_url}', # If not in above: "_attr_citation/@href"
'attribute_accuracy': '{_dataqual_report}/DQ_QuantitativeAttributeAccuracy/measureDescription/CharacterString',
BOUNDING_BOX: '{_idinfo_extent}/geographicElement/EX_GeographicBoundingBox/{{bbox_path}}',
'dataset_completeness': '{_dataqual_report}/DQ_CompletenessOmission/measureDescription/CharacterString',
DIGITAL_FORMS: '{_distinfo}/distributionFormat/MD_Format/{{df_path}}',
'_access_desc': '{_distinfo_rsrc}/description/CharacterString',
'_access_instrs': '{_distinfo_rsrc}/protocol/CharacterString',
'_network_resource': '{_distinfo_rsrc}/linkage/URL',
PROCESS_STEPS: '{_dataqual_lineage}/processStep/LI_ProcessStep/{{ps_path}}',
LARGER_WORKS: '{_idinfo_aggregate_citation}/{{lw_path}}',
'_lw_citation': '{_idinfo_aggregate_contact}/{{lw_path}}',
'_lw_collective': '{_idinfo_aggregate_citation}/collectiveTitle/CharacterString',
'_lw_contact': '{_idinfo_aggregate_contact}/contactInfo/CI_Contact/{{lw_path}}',
'_lw_linkage': '{_idinfo_aggregate_contact}/contactInfo/CI_Contact/onlineResource/CI_OnlineResource/{{lw_path}}',
RASTER_INFO: '{_srinfo_grid_dim}/{{ri_path}}',
'_ri_num_dims': '{_srinfo_grid_rep}/numberOfDimensions/Integer',
'other_citation_info': '{_idinfo_citation}/otherCitationDetails/CharacterString',
'use_constraints': '{_idinfo}/resourceConstraints/MD_Constraints/useLimitation/CharacterString',
DATES: '{_idinfo_extent}/temporalElement/EX_TemporalExtent/extent/{{type_path}}',
KEYWORDS_PLACE: '{_idinfo_keywords}/keyword/CharacterString',
KEYWORDS_STRATUM: '{_idinfo_keywords}/keyword/CharacterString',
KEYWORDS_TEMPORAL: '{_idinfo_keywords}/keyword/CharacterString',
KEYWORDS_THEME: '{_idinfo_keywords}/keyword/CharacterString'
}
# Apply XPATH root formats to the basic data map formats
ISO_TAG_FORMATS.update(ISO_TAG_ROOTS)
ISO_TAG_FORMATS.update(format_xpaths(ISO_TAG_FORMATS, **ISO_TAG_ROOTS))
ISO_TAG_FORMATS = FrozenOrderedDict(ISO_TAG_FORMATS)
# Leaf element names that represent ISO primitive value types. These are the
# tags _trim_xpath() strips from an XPATH so that updates target the parent
# element, preventing multiple primitive tags from piling up under one node.
ISO_TAG_PRIMITIVES = frozenset({
    'Binary', 'Boolean', 'CharacterString',
    'Date', 'DateTime', 'timePosition',
    'Decimal', 'Integer', 'Real', 'RecordType',
    'CI_DateTypeCode', 'MD_KeywordTypeCode', 'URL'
})
class IsoParser(MetadataParser):
""" A class to parse metadata files conforming to the ISO-19115 standard """
def _init_data_map(self):
    """
    OVERRIDDEN: Initialize required ISO-19115 data map with XPATHS and specialized functions.

    Builds two structures on first call:
    - self._data_map: property name -> XPATH string or ParserProperty
    - self._data_structures: property name -> dict of sub-property XPATHs
      for complex values (attributes, bounding box, contacts, dates, etc.)
    """
    if self._data_map is not None:
        return  # Initiation happens once

    # Parse and validate the ISO metadata root
    if self._xml_tree is None:
        iso_root = ISO_ROOTS[0]
    else:
        iso_root = get_element_name(self._xml_tree)

    if iso_root not in ISO_ROOTS:
        raise InvalidContent('Invalid XML root for ISO-19115 standard: {root}', root=iso_root)

    iso_data_map = {'_root': iso_root}
    iso_data_map.update(ISO_TAG_ROOTS)
    iso_data_map.update(ISO_TAG_FORMATS)

    iso_data_structures = {}

    # Capture and format complex XPATHs

    ad_format = iso_data_map[ATTRIBUTES]
    # Feature type source shares the attribute source path minus the attribute segment
    ft_source = iso_data_map['_attr_src'].replace('/carrierOfCharacteristics/FC_FeatureAttribute', '')

    iso_data_structures[ATTRIBUTES] = format_xpaths(
        ISO_DEFINITIONS[ATTRIBUTES],
        label=ad_format.format(ad_path='memberName/LocalName'),
        aliases=ad_format.format(ad_path='aliases/LocalName'),  # Not in spec
        definition=ad_format.format(ad_path='definition/CharacterString'),
        # First try to populate attribute definition source from FC_FeatureAttribute
        definition_src=iso_data_map['_attr_src'] + '/organisationName/CharacterString',
        _definition_src=iso_data_map['_attr_src'] + '/individualName/CharacterString',
        # Then assume feature type source is the same as attribute: populate from FC_FeatureType
        __definition_src=ft_source + '/organisationName/CharacterString',
        ___definition_src=ft_source + '/individualName/CharacterString'
    )

    bb_format = iso_data_map[BOUNDING_BOX]
    iso_data_structures[BOUNDING_BOX] = format_xpaths(
        ISO_DEFINITIONS[BOUNDING_BOX],
        east=bb_format.format(bbox_path='eastBoundLongitude/Decimal'),
        south=bb_format.format(bbox_path='southBoundLatitude/Decimal'),
        west=bb_format.format(bbox_path='westBoundLongitude/Decimal'),
        north=bb_format.format(bbox_path='northBoundLatitude/Decimal')
    )

    ct_format = iso_data_map[CONTACTS]
    iso_data_structures[CONTACTS] = format_xpaths(
        ISO_DEFINITIONS[CONTACTS],
        name=ct_format.format(ct_path='individualName/CharacterString'),
        organization=ct_format.format(ct_path='organisationName/CharacterString'),
        position=ct_format.format(ct_path='positionName/CharacterString'),
        email=ct_format.format(
            ct_path='contactInfo/CI_Contact/address/CI_Address/electronicMailAddress/CharacterString'
        )
    )

    dt_format = iso_data_map[DATES]
    iso_data_structures[DATES] = {
        DATE_TYPE_MULTIPLE: dt_format.format(type_path='TimeInstant/timePosition'),
        DATE_TYPE_RANGE_BEGIN: dt_format.format(type_path='TimePeriod/begin/TimeInstant/timePosition'),
        DATE_TYPE_RANGE_END: dt_format.format(type_path='TimePeriod/end/TimeInstant/timePosition'),
        DATE_TYPE_SINGLE: dt_format.format(type_path='TimeInstant/timePosition')  # Same as multiple
    }
    # A date range is represented as the pair of its begin/end XPATHs
    iso_data_structures[DATES][DATE_TYPE_RANGE] = [
        iso_data_structures[DATES][DATE_TYPE_RANGE_BEGIN],
        iso_data_structures[DATES][DATE_TYPE_RANGE_END]
    ]

    df_format = iso_data_map[DIGITAL_FORMS]
    iso_data_structures[DIGITAL_FORMS] = format_xpaths(
        ISO_DEFINITIONS[DIGITAL_FORMS],
        name=df_format.format(df_path='name/CharacterString'),
        content='',  # Not supported in ISO-19115 (appending to spec)
        decompression=df_format.format(df_path='fileDecompressionTechnique/CharacterString'),
        version=df_format.format(df_path='version/CharacterString'),
        specification=df_format.format(df_path='specification/CharacterString'),
        access_desc=iso_data_map['_access_desc'],
        access_instrs=iso_data_map['_access_instrs'],
        network_resource=iso_data_map['_network_resource']
    )

    keywords_structure = {
        'keyword_root': 'MD_Keywords/keyword',
        'keyword_type': 'MD_Keywords/type/MD_KeywordTypeCode',
        'keyword': 'MD_Keywords/keyword/CharacterString'
    }
    # Each keyword property (theme, place, etc.) gets its own copy of the structure
    for keyword_prop in KEYWORD_PROPS:
        iso_data_structures[keyword_prop] = deepcopy(keywords_structure)

    lw_format = iso_data_map[LARGER_WORKS]
    iso_data_structures[LARGER_WORKS] = format_xpaths(
        ISO_DEFINITIONS[LARGER_WORKS],
        title=lw_format.format(lw_path='title/CharacterString'),
        edition=lw_format.format(lw_path='edition/CharacterString'),
        origin=iso_data_map['_lw_citation'].format(lw_path='individualName/CharacterString'),
        online_linkage=iso_data_map['_lw_linkage'].format(lw_path='linkage/URL'),
        other_citation=lw_format.format(lw_path='otherCitationDetails/CharacterString'),
        date=lw_format.format(lw_path='editionDate/Date'),
        place=iso_data_map['_lw_contact'].format(lw_path='address/CI_Address/city/CharacterString'),
        info=iso_data_map['_lw_citation'].format(lw_path='organisationName/CharacterString')
    )

    ps_format = iso_data_map[PROCESS_STEPS]
    iso_data_structures[PROCESS_STEPS] = format_xpaths(
        ISO_DEFINITIONS[PROCESS_STEPS],
        description=ps_format.format(ps_path='description/CharacterString'),
        date=ps_format.format(ps_path='dateTime/DateTime'),
        sources=ps_format.format(
            ps_path='source/LI_Source/sourceCitation/CI_Citation/alternateTitle/CharacterString'
        )
    )

    ri_format = iso_data_map[RASTER_INFO]
    iso_data_structures[RASTER_INFO] = format_xpaths(
        ISO_DEFINITIONS[RASTER_DIMS],
        type=ri_format.format(ri_path='dimensionName/MD_DimensionNameTypeCode'),
        _type=ri_format.format(ri_path='dimensionName/MD_DimensionNameTypeCode/@codeListValue'),
        size=ri_format.format(ri_path='dimensionSize/Integer'),
        value=ri_format.format(ri_path='resolution/Measure'),
        units=ri_format.format(ri_path='resolution/Measure/@uom')
    )

    # Assign XPATHS and gis_metadata.utils.ParserProperties to data map:
    # complex properties get parse/update function pairs, simple ones keep XPATHs
    for prop, xpath in dict(iso_data_map).items():
        if prop == ATTRIBUTES:
            iso_data_map[prop] = ParserProperty(self._parse_attribute_details, self._update_attribute_details)
        elif prop in (CONTACTS, PROCESS_STEPS):
            iso_data_map[prop] = ParserProperty(self._parse_complex_list, self._update_complex_list)
        elif prop in (BOUNDING_BOX, LARGER_WORKS):
            iso_data_map[prop] = ParserProperty(self._parse_complex, self._update_complex)
        elif prop == DATES:
            iso_data_map[prop] = ParserProperty(self._parse_dates, self._update_dates)
        elif prop == DIGITAL_FORMS:
            iso_data_map[prop] = ParserProperty(self._parse_digital_forms, self._update_digital_forms)
        elif prop in KEYWORD_PROPS:
            iso_data_map[prop] = ParserProperty(self._parse_keywords, self._update_keywords)
        elif prop == RASTER_INFO:
            iso_data_map[prop] = ParserProperty(self._parse_raster_info, self._update_raster_info)
        else:
            iso_data_map[prop] = xpath

    self._data_map = iso_data_map
    self._data_structures = iso_data_structures
def _parse_attribute_details(self, prop=ATTRIBUTES):
    """ Parses Attribute Details, preferring the remote ISO-19110 file over the local tree """

    # The official location for attribute details is a separate ISO-19110 file
    parsed_attributes = self._parse_attribute_details_file(prop)

    if parsed_attributes is None:
        # No usable remote location: fall back to the metadata tree itself
        parsed_attributes = self._parse_complex_list(prop)

    for attribute in parsed_attributes:
        # Aliases are not in the ISO standard: default each missing one to the label
        if not attribute['aliases']:
            attribute['aliases'] = attribute['label']

    return get_default_for_complex(prop, parsed_attributes)
def _parse_attribute_details_file(self, prop=ATTRIBUTES):
    """ Parses a list of Attribute Details data structures from a remote ISO-19110 file """

    # The remote file URL may be stored in one of two places:
    #    Starting at: contentInfo/MD_FeatureCatalogueDescription/featureCatalogueCitation
    #    ATTRIBUTE: href
    #    ELEMENT TEXT: CI_Citation/.../CI_Contact/onlineResource/CI_OnlineResource/linkage

    file_url = parse_property(self._xml_tree, None, self._data_map, '_attributes_file')
    self._attr_details_file_url = file_url or None

    if not file_url:
        return None

    try:
        remote_tree = get_remote_element(file_url)
    except Exception:
        # Unreachable or unparseable file: behave as though no URL was configured
        self._attr_details_file_url = None
        return None

    return parse_complex_list(
        remote_tree,
        self._get_xroot_for(prop),
        self._data_structures[ATTRIBUTES],
        prop
    )
def _parse_digital_forms(self, prop=DIGITAL_FORMS):
    """
    Concatenates a list of Digital Form data structures parsed from the metadata.

    Digital form fields and transfer option fields live under different roots
    in ISO-19115; the two lists are parsed separately and merged positionally.
    """
    xpath_map = self._data_structures[prop]

    # Parse base digital form fields: 'name', 'content', 'decompression', 'version', 'specification'
    xpath_root = self._data_map['_digital_forms_root']
    digital_forms = parse_complex_list(self._xml_tree, xpath_root, xpath_map, prop)

    # Parse digital form transfer option fields: 'access_desc', 'access_instrs', 'network_resource'
    xpath_root = self._data_map['_transfer_options_root']
    transfer_opts = parse_complex_list(self._xml_tree, xpath_root, xpath_map, prop)

    # Split out digital form content that has been appended to specifications
    # (ISO has no 'content' field, so _update_digital_forms stores it after a delimiter)

    content_delim = ISO_DIGITAL_FORMS_DELIM

    for digital_form in digital_forms:
        specs = reduce_value(digital_form['specification'])
        specs = specs.splitlines() if isinstance(specs, str) else specs
        specifications = wrap_value(s.strip() for s in specs)

        digital_form['content'] = []
        digital_form['specification'] = []
        has_content = False

        # For each specification, insert delim before appending content
        for spec in specifications:
            has_content = has_content or spec == content_delim
            if not has_content:
                digital_form['specification'].append(spec)
            elif spec != content_delim:
                digital_form['content'].append(spec)

        # Reduce spec and content to single string values if possible
        for form_prop in ('content', 'specification'):
            digital_form[form_prop] = reduce_value(filter_empty(digital_form[form_prop], u''))

    # Combine digital forms and transfer options into a single complex struct,
    # pairing them positionally and padding the shorter list with empty values

    df_len = len(digital_forms)
    to_len = len(transfer_opts)
    parsed_forms = []

    for idx in range(0, max(df_len, to_len)):
        digital_form = {}.fromkeys(ISO_DEFINITIONS[prop], u'')

        if idx < df_len:
            digital_form.update(i for i in digital_forms[idx].items() if i[1])
        if idx < to_len:
            digital_form.update(i for i in transfer_opts[idx].items() if i[1])

        # Drop entries where every field is empty
        if any(digital_form.values()):
            parsed_forms.append(digital_form)

    return get_default_for_complex(prop, parsed_forms)
def _parse_keywords(self, prop):
    """ Parse type-specific keywords from the metadata: Theme or Place """

    if prop not in KEYWORD_PROPS:
        # Unsupported keyword property: nothing to collect
        return []

    xpath_map = self._data_structures[prop]
    type_xpath = xpath_map['keyword_type']
    keyword_xpath = xpath_map['keyword']
    wanted_type = KEYWORD_TYPES[prop].lower()

    keywords = []
    for element in get_elements(self._xml_tree, self._data_map['_keywords_root']):
        # Only collect from descriptiveKeywords nodes whose type matches (case-insensitively)
        if get_element_text(element, type_xpath).lower() == wanted_type:
            keywords.extend(get_elements_text(element, keyword_xpath))

    return keywords
def _parse_raster_info(self, prop=RASTER_INFO):
    """
    Collapses multiple dimensions into a single raster_info complex struct.

    Each MD_Dimension is classified by its dimension name ('vertical', 'column'
    or 'row') and mapped onto count/resolution fields of the combined struct.
    Returns an empty dict when no raster info is present.
    """
    raster_info = {}.fromkeys(ISO_DEFINITIONS[prop], u'')

    # Ensure conversion of lists to newlines is in place
    raster_info['dimensions'] = get_default_for_complex_sub(
        prop=prop,
        subprop='dimensions',
        value=parse_property(self._xml_tree, None, self._data_map, '_ri_num_dims'),
        xpath=self._data_map['_ri_num_dims']
    )

    xpath_root = self._get_xroot_for(prop)
    xpath_map = self._data_structures[prop]

    for dimension in parse_complex_list(self._xml_tree, xpath_root, xpath_map, RASTER_DIMS):
        dimension_type = dimension['type'].lower()

        if dimension_type == 'vertical':
            raster_info['vertical_count'] = dimension['size']

        elif dimension_type == 'column':
            raster_info['column_count'] = dimension['size']
            # Resolution is "<value> <units>", trimmed in case either part is empty
            raster_info['x_resolution'] = u' '.join(dimension[k] for k in ['value', 'units']).strip()

        elif dimension_type == 'row':
            raster_info['row_count'] = dimension['size']
            raster_info['y_resolution'] = u' '.join(dimension[k] for k in ['value', 'units']).strip()

    # An all-empty struct means there was no raster info at all
    return raster_info if any(raster_info[k] for k in raster_info) else {}
def _update_attribute_details(self, **update_props):
    """ Update operation for ISO Attribute Details metadata: write to "MD_Metadata/featureType" """

    # Writing back to a remote ISO-19110 file is not supported: drop the cached
    # URL and remove the featureCatalogueCitation reference from the tree
    self._attr_details_file_url = None
    remove_element(update_props['tree_to_update'], self._data_map['_attr_citation'], True)

    # Attribute details are then written inline as a complex list
    return self._update_complex_list(**update_props)
def _update_dates(self, **update_props):
    """
    Update operation for ISO Dates metadata
    :see: gis_metadata.utils.COMPLEX_DEFINITIONS[DATES]
    """
    xpath_root = self._data_map['_dates_root']

    if self.dates:
        date_type = self.dates[DATE_TYPE]

        # Clear all existing date info from the common root before rewriting
        remove_element(update_props['tree_to_update'], xpath_root)

        # Extend the root to the element kind that matches the date type
        if date_type == DATE_TYPE_MULTIPLE:
            xpath_root += '/TimeInstant'
        elif date_type == DATE_TYPE_RANGE:
            xpath_root += '/TimePeriod'

    return super(IsoParser, self)._update_dates(xpath_root, **update_props)
def _update_digital_forms(self, **update_props):
"""
Update operation for ISO Digital Forms metadata
:see: gis_metadata.utils.COMPLEX_DEFINITIONS[DIGITAL_FORMS]
"""
digital_forms = wrap_value(update_props['values'])
# Update all Digital Form properties: distributionFormat*
xpath_map = self._data_structures[update_props['prop']]
dist_format_props = ('name', 'decompression', 'version', 'specification')
dist_format_xroot = self._data_map['_digital_forms_root']
dist_format_xmap = {prop: xpath_map[prop] for prop in dist_format_props}
dist_formats = []
for digital_form in digital_forms:
dist_format = {prop: digital_form[prop] for prop in dist_format_props}
if digital_form.get('content'):
dist_spec = wrap_value(digital_form.get('specification'))
dist_spec.append(ISO_DIGITAL_FORMS_DELIM)
dist_spec.extend(wrap_value(digital_form['content']))
dist_format['specification'] = dist_spec
dist_formats.append(dist_format)
update_props['values'] = dist_formats
dist_formats = update_complex_list(
xpath_root=dist_format_xroot, xpath_map=dist_format_xmap, **update_props<|fim▁hole|>
trans_option_props = ('access_desc', 'access_instrs', 'network_resource')
trans_option_xroot = self._data_map['_transfer_options_root']
trans_option_xmap = {prop: self._data_map['_' + prop] for prop in trans_option_props}
trans_options = []
for digital_form in digital_forms:
trans_options.append({prop: digital_form[prop] for prop in trans_option_props})
update_props['values'] = trans_options
trans_options = update_complex_list(
xpath_root=trans_option_xroot, xpath_map=trans_option_xmap, **update_props
)
return {
'distribution_formats': dist_formats,
'transfer_options': trans_options
}
def _update_keywords(self, **update_props):
    """
    Update operation for ISO type-specific Keywords metadata: Theme or Place.

    Removes existing descriptiveKeywords nodes of the matching type, inserts a
    fresh node with its type code, and writes the new keyword values under it.
    Returns the list of affected keyword elements.
    """
    tree_to_update = update_props['tree_to_update']
    prop = update_props['prop']
    values = update_props['values']

    keywords = []

    if prop in KEYWORD_PROPS:
        xpath_root = self._data_map['_keywords_root']
        xpath_map = self._data_structures[prop]

        xtype = xpath_map['keyword_type']
        xroot = xpath_map['keyword_root']
        xpath = xpath_map['keyword']
        ktype = KEYWORD_TYPES[prop]

        # Remove descriptiveKeyword nodes according to type
        # NOTE(review): remove_element is called with xpath_root (not the matched
        # element) while iterating elements at that root — confirm this targets
        # only nodes of the matching type and not all descriptiveKeywords
        for element in get_elements(tree_to_update, xpath_root):
            if get_element_text(element, xtype).lower() == ktype.lower():
                remove_element(tree_to_update, xpath_root)

        element = insert_element(tree_to_update, 0, xpath_root)
        insert_element(element, 0, xtype, ktype)  # Add the type node

        keywords.extend(update_property(element, xroot, xpath, prop, values))

    return keywords
def _update_raster_info(self, **update_props):
    """
    Derives multiple dimensions from a single raster_info complex struct.

    Writes the dimension count at the raster_info root, then expands the
    vertical/column/row fields of the struct into individual MD_Dimension
    entries. Returns the list of affected elements.
    """
    tree_to_update = update_props['tree_to_update']
    prop = update_props['prop']
    values = update_props.pop('values')

    # Update number of dimensions at raster_info root (applies to all dimensions below)
    xroot, xpath = None, self._data_map['_ri_num_dims']
    raster_info = [update_property(tree_to_update, xroot, xpath, prop, values.get('dimensions', u''))]

    # Derive vertical, longitude, and latitude dimensions from raster_info

    xpath_root = self._get_xroot_for(prop)
    xpath_map = self._data_structures[prop]

    # Each dimension dict stays empty (and is thus skipped) unless its
    # corresponding count/resolution values are present

    v_dimension = {}
    if values.get('vertical_count'):
        v_dimension = v_dimension.fromkeys(xpath_map, u'')
        v_dimension['type'] = 'vertical'
        v_dimension['size'] = values.get('vertical_count', u'')

    x_dimension = {}
    if values.get('column_count') or values.get('x_resolution'):
        x_dimension = x_dimension.fromkeys(xpath_map, u'')
        x_dimension['type'] = 'column'
        x_dimension['size'] = values.get('column_count', u'')
        x_dimension['value'] = values.get('x_resolution', u'')

    y_dimension = {}
    if values.get('row_count') or values.get('y_resolution'):
        y_dimension = y_dimension.fromkeys(xpath_map, u'')
        y_dimension['type'] = 'row'
        y_dimension['size'] = values.get('row_count', u'')
        y_dimension['value'] = values.get('y_resolution', u'')

    # Update derived dimensions as complex list, and append affected elements for return
    update_props['prop'] = RASTER_DIMS
    update_props['values'] = [v_dimension, x_dimension, y_dimension]

    raster_info += update_complex_list(xpath_root=xpath_root, xpath_map=xpath_map, **update_props)

    return raster_info
def update(self, use_template=False, **metadata_defaults):
    """
    OVERRIDDEN: Prevents writing multiple CharacterStrings per XPATH property

    :param use_template: when True, write into a fresh metadata template
        instead of the parsed XML tree
    :param metadata_defaults: default values passed through to the template
    :return: the updated XML tree
    """
    self.validate()

    tree_to_update = self._xml_tree if not use_template else self._get_template(**metadata_defaults)
    supported_props = self._metadata_props

    # Iterate over keys, and extract non-primitive root for all XPATHs
    #    xroot = identificationInfo/MD_DataIdentification/abstract/
    #    xpath = identificationInfo/MD_DataIdentification/abstract/CharacterString
    #
    # This prevents multiple primitive tags from being inserted under an element

    for prop, xpath in self._data_map.items():
        if not prop.startswith('_') or prop.strip('_') in supported_props:
            # Send only public or alternate properties
            xroot = self._trim_xpath(xpath, prop)
            values = getattr(self, prop, u'')
            update_property(tree_to_update, xroot, xpath, prop, values, supported_props)

    return tree_to_update
def _trim_xpath(self, xpath, prop):
    """ Removes a trailing primitive type tag from an XPATH, yielding its parent root """

    xroot = self._get_xroot_for(prop)

    if xroot is None and isinstance(xpath, str):
        tags = xpath.split(XPATH_DELIM)
        # Only trim when the leaf is a known ISO primitive (CharacterString, Decimal, ...)
        if tags[-1] in ISO_TAG_PRIMITIVES:
            return XPATH_DELIM.join(tags[:-1])

    return xroot
|
)
# Update all Network Resources: transferOptions+
|
<|file_name|>subs.tsx<|end_file_name|><|fim▁begin|>import {
Dequeue,
MarkTrackSkipped,
Pause,
Play,
SeekBackwards,
SeekForwards,
Stop,
UpdateQueue,
} from "./actions";
import { ApiRequest, SendCommand } from "./effects";
import { Keyboard, Interval } from "hyperapp-fx";
import { MQTTSubscribe } from "@shish2k/hyperapp-mqtt";
import { http2ws } from "./utils";
/**
* Connect to the MQTT server, listen for queue / settings state updates,
* and react to commands on the command channel
*/
export function getOpenMQTTListener(
    state: State,
): Subscription | null {
    // No room joined yet: nothing to subscribe to
    if (!state.room_name) {
        return null;
    }
    return MQTTSubscribe({
        url: http2ws(state.root) + "/mqtt",
        username: state.room_name,
        password: state.room_password,
        // Wildcard subscription covers the room's commands/settings/queue topics
        topic: "karakara/room/" + state.room_name + "/#",
        connect(state: State): Dispatchable {
            return { ...state, connected: true };
        },
        close(state: State): Dispatchable {
            return { ...state, connected: false };
        },
        message(state: State, msg): Dispatchable {
            // msg = mqtt-packet
            const topic: string = msg.topic.split("/").pop();
            const data: string = msg.payload.toString();

            // Log the payload (pretty-printed when it parses as JSON)
            console.groupCollapsed("mqtt_onmessage(", topic, ")");
            try {
                console.log(JSON.parse(data));
            } catch (error) {
                console.log(data);
            }
            console.groupEnd();

            switch (topic) {
                case "commands":
                    const cmd = data.trim();
                    switch (cmd) {
                        case "play":
                            return Play(state);
                        case "stop":
                            return Stop(state);
                        case "pause":
                            return Pause(state);
                        case "seek_forwards":
                            return SeekForwards(state, null);
                        case "seek_backwards":
                            return SeekBackwards(state, null);
                        case "played":
                            return Dequeue(state);
                        // Only one instance should mark the current track as skipped, to avoid
                        // skipping two tracks
                        case "skip":
                            return state.podium
                                ? Dequeue(state)
                                : MarkTrackSkipped(state);
                        default:
                            return state;
                    }
                case "settings":
                    // Merge incoming settings over the current ones
                    return {
                        ...state,
                        settings: {
                            ...state.settings,
                            ...JSON.parse(data),
                        },
                    };
                case "queue":
                    return UpdateQueue(state, JSON.parse(data));
                default:
                    return state;
            }
        },
    });
}
export const KeyboardListener = Keyboard({
    downs: true,
    action(state: State, event: KeyboardEvent): Dispatchable {
        // Keyboard shortcuts are disabled while the settings screen is open
        if (state.show_settings) {
            return state;
        }
        // Map each shortcut key to the action it triggers
        const shortcuts = new Map<string, CallableFunction>([
            ["s", Dequeue], // skip
            ["Enter", Play],
            ["Escape", Stop],
            ["ArrowLeft", SeekBackwards],
            ["ArrowRight", SeekForwards],
            [" ", Pause],
        ]);
        const action = shortcuts.get(event.key);
        if (action) {
            event.preventDefault();
            return action(state);
        }
        return state;
    },
});
export const IntervalListener = Interval({
every: 200,
action(state: State, timestamp): Dispatchable {
if (
state.progress >= state.settings["karakara.player.autoplay.seconds"]
) {
return [state, SendCommand(state, "play")];
} else {
return { ...state, progress: state.progress + 1 / 5 };
}<|fim▁hole|>function _friSubscriber(dispatch, props) {
// subscription is restarted whenever props changes,
// and props is just {room: state.room_name}
if (props.room) {
setTimeout(function () {
dispatch((state) => [
state,
ApiRequest({
function: "random_images",
state: state,
action(state: State, response): State {
let images = response.data.images.slice(0, 25);
return {
...state,
images: images.map((fn, n) => ({
filename: fn,
x: n / images.length,
delay: Math.random() * 10,
})),
};
},
}),
]);
}, 0);
}
return function () {
// no unsubscribe
};
}
// Subscription factory: restarts _friSubscriber whenever the room name changes
export function FetchRandomImages(room_name: string): Subscription {
    return [_friSubscriber, { room: room_name }];
}
|
},
});
|
<|file_name|>htmltablecolelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
/// DOM struct backing an HTML table column element; wraps the shared
/// HTMLElement state with no additional fields.
#[dom_struct]
pub struct HTMLTableColElement {
    htmlelement: HTMLElement,
}
impl HTMLTableColElement {
    /// Builds the plain struct value with its inherited HTMLElement state
    /// initialized; callers are expected to wrap it in a rooted DOM node.
    fn new_inherited(
        local_name: LocalName,
        prefix: Option<Prefix>,
        document: &Document,
    ) -> HTMLTableColElement {
        HTMLTableColElement {
            htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
        }
    }
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLTableColElement> {
let n = Node::reflect_node(
Box::new(HTMLTableColElement::new_inherited(
local_name, prefix, document,
)),
document,
);
<|fim▁hole|> }
}<|fim▁end|>
|
n.upcast::<Node>().set_weird_parser_insertion_mode();
n
|
<|file_name|>ember-data.model-fragments.js<|end_file_name|><|fim▁begin|>/*!
* @overview Ember Data Model Fragments
* @copyright Copyright 2015 Lytics Inc. and contributors
* @license Licensed under MIT license
* See https://raw.githubusercontent.com/lytics/ember-data.model-fragments/master/LICENSE
* @version 0.3.3+8b1fcdd5
*/
(function() {
"use strict";
var ember$lib$main$$default = Ember;
var ember$data$lib$main$$default = DS;
var ember$data$lib$serializers$json$api$serializer$$default = DS.JSONAPISerializer;
var ember$data$lib$serializers$json$serializer$$default = DS.JSONSerializer;
var ember$data$lib$system$model$internal$model$$default = DS.InternalModel;
var ember$data$lib$system$model$states$$default = DS.RootState;
var ember$data$lib$system$model$$default = DS.Model;
var ember$data$lib$system$snapshot$$default = DS.Snapshot;
var ember$data$lib$system$store$$default = DS.Store;
var ember$data$lib$system$transform$$default = DS.Transform;
/**
@module ember-data.model-fragments
*/
var model$fragments$lib$fragments$states$$get = ember$lib$main$$default.get;
var model$fragments$lib$fragments$states$$create = Object.create || ember$lib$main$$default.create;
var model$fragments$lib$fragments$states$$didSetProperty = ember$data$lib$system$model$states$$default.loaded.saved.didSetProperty;
var model$fragments$lib$fragments$states$$propertyWasReset = ember$data$lib$system$model$states$$default.loaded.updated.uncommitted.propertyWasReset;
// Shared `setup` handler for the dirty (created/updated) states: notifies the
// owner record that this fragment has become dirty.
var model$fragments$lib$fragments$states$$dirtySetup = function(internalModel) {
  var record = internalModel._owner;
  var key = internalModel._name;

  // A newly created fragment may not have an owner yet
  if (record) {
    model$fragments$lib$fragments$states$$fragmentDidDirty(record, key, internalModel);
  }
};
/**
Like `DS.Model` instances, all fragments have a `currentState` property
that reflects where they are in the model lifecycle. However, there are much
fewer states that a fragment can be in, since the `loading` state doesn't
apply, `inFlight` states are no different than the owner record's, and there
is no concept of a `deleted` state.
This is the simplified hierarchy of valid states for a fragment:
```text
* root
* empty
* loaded
* created
* saved
* updated
```
Note that there are no `uncommitted` sub-states because it's implied by the
`created` and `updated` states (since there are no `inFlight` substates).
@class FragmentRootState
*/
var model$fragments$lib$fragments$states$$FragmentRootState = {
// Include all `DS.Model` state booleans for consistency
isEmpty: false,
isLoading: false,
isLoaded: false,
isDirty: false,
isSaving: false,
isDeleted: false,
isNew: false,
isValid: true,
didSetProperty: model$fragments$lib$fragments$states$$didSetProperty,
propertyWasReset: ember$lib$main$$default.K,
becomeDirty: ember$lib$main$$default.K,
rolledBack: ember$lib$main$$default.K,
empty: {
isEmpty: true,
loadedData: function(internalModel) {
internalModel.transitionTo('loaded.created');
},
pushedData: function(internalModel) {
internalModel.transitionTo('loaded.saved');
}
},
loaded: {
pushedData: function(internalModel) {
internalModel.transitionTo('saved');
},
saved: {
setup: function(internalModel) {
var record = internalModel._owner;
var key = internalModel._name;
// Abort if fragment is still initializing
if (!record._internalModel._fragments[key] || internalModel._isInitializing) { return; }
// Reset the property on the owner record if no other siblings
// are dirty (or there are no siblings)
if (!model$fragments$lib$fragments$states$$get(record, key + '.hasDirtyAttributes')) {
model$fragments$lib$fragments$states$$fragmentDidReset(record, key, internalModel);
}
},
pushedData: ember$lib$main$$default.K,
didCommit: ember$lib$main$$default.K,
becomeDirty: function(internalModel) {
internalModel.transitionTo('updated');
}
},
created: {
isDirty: true,
setup: model$fragments$lib$fragments$states$$dirtySetup,
didCommit: function(internalModel) {
internalModel.transitionTo('saved');
}
},
updated: {
isDirty: true,
setup: model$fragments$lib$fragments$states$$dirtySetup,
propertyWasReset: model$fragments$lib$fragments$states$$propertyWasReset,
didCommit: function(internalModel) {
internalModel.transitionTo('saved');
},
rolledBack: function(internalModel) {
internalModel.transitionTo('saved');
}
}
}
};
// Shallow-copies every enumerable property of `hash` (own or inherited, to
// match for-in semantics) onto `original`, then returns the mutated target.
function model$fragments$lib$fragments$states$$mixin(original, hash) {
  var key;

  for (key in hash) {
    original[key] = hash[key];
  }

  return original;
}
// Wouldn't it be awesome if this was public?
// Recursively links a state object into a hierarchy: each (sub)state gets a
// `parentState` back-reference and a dotted `stateName`, with parent states
// used as prototypes so handlers are inherited.
function model$fragments$lib$fragments$states$$wireState(object, parent, name) {
  // TODO: Use Object.create and copy instead of mutating
  object = model$fragments$lib$fragments$states$$mixin(parent ? model$fragments$lib$fragments$states$$create(parent) : {}, object);
  object.parentState = parent;
  object.stateName = name;

  for (var prop in object) {
    // Skip inherited members and the bookkeeping keys just assigned above
    if (!object.hasOwnProperty(prop) || prop === 'parentState' || prop === 'stateName') {
      continue;
    }
    // Nested objects are sub-states: wire them recursively
    if (typeof object[prop] === 'object') {
      object[prop] = model$fragments$lib$fragments$states$$wireState(object[prop], object, name + "." + prop);
    }
  }

  return object;
}
model$fragments$lib$fragments$states$$FragmentRootState = model$fragments$lib$fragments$states$$wireState(model$fragments$lib$fragments$states$$FragmentRootState, null, 'root');
var model$fragments$lib$fragments$states$$default = model$fragments$lib$fragments$states$$FragmentRootState;
// Mark the owner record dirty because the fragment stored under `key` became
// dirty. Deleted owners ignore fragment dirtiness entirely.
function model$fragments$lib$fragments$states$$fragmentDidDirty(record, key, fragment) {
  if (model$fragments$lib$fragments$states$$get(record, 'isDeleted')) {
    return;
  }
  // Stash the fragment in the owner's `_attributes` hash as a dirty
  // placeholder, then push the owner into a dirty state.
  record._internalModel._attributes[key] = fragment;
  record.send('becomeDirty');
}
// Clear the dirty-fragment placeholder for `key` on the owner record and, if
// appropriate, let the owner's state machine re-evaluate its dirtiness.
function model$fragments$lib$fragments$states$$fragmentDidReset(record, key) {
  // Drop the entry in the owner record's `_attributes` hash that marked the
  // fragment as dirty.
  delete record._internalModel._attributes[key];
  // Don't reset if the record is new, otherwise it will enter the 'deleted'
  // state. NOTE: This case almost never happens with attributes because their
  // initial value is always undefined, which is *usually* not what attributes
  // get 'reset' to.
  if (model$fragments$lib$fragments$states$$get(record, 'isNew')) {
    return;
  }
  record.send('propertyWasReset', key);
}
/**
@module ember-data.model-fragments
*/
// Module-local aliases for Ember's get/set/computed.
var model$fragments$lib$fragments$array$stateful$$get = ember$lib$main$$default.get;
var model$fragments$lib$fragments$array$stateful$$set = ember$lib$main$$default.set;
var model$fragments$lib$fragments$array$stateful$$computed = ember$lib$main$$default.computed;
/**
A state-aware array that is tied to an attribute of a `DS.Model` instance.
Dirtiness is tracked by comparing the current contents against the
`_originalState` rollback point captured the last time canonical data arrived.
@class StatefulArray
@namespace DS
@extends Ember.ArrayProxy
*/
var model$fragments$lib$fragments$array$stateful$$StatefulArray = ember$lib$main$$default.ArrayProxy.extend({
/**
A reference to the array's owner record.
@property owner
@type {DS.Model}
*/
owner: null,
/**
The array's property name on the owner record.
@property name
@private
@type {String}
*/
name: null,
init: function() {
this._super();
// `_pendingData` guards against recursive `setupData` calls (see below).
this._pendingData = undefined;
model$fragments$lib$fragments$array$stateful$$set(this, '_originalState', []);
},
// Each instance gets its own fresh backing Ember array.
content: model$fragments$lib$fragments$array$stateful$$computed(function() {
return ember$lib$main$$default.A();
}),
/**
Replace the array's contents with canonical data from the adapter and
record a new rollback point.
@method setupData
@private
@param {Object} data
*/
setupData: function(data) {
// Since replacing the contents of the array can trigger changes to fragment
// array properties, this method can get invoked recursively with the same
// data, so short circuit here once it's been setup the first time
if (this._pendingData === data) {
return;
}
this._pendingData = data;
var processedData = this._processData(data);
// This data is canonical, so create rollback point
model$fragments$lib$fragments$array$stateful$$set(this, '_originalState', processedData);
// Completely replace the contents with the new data
this.replaceContent(0, model$fragments$lib$fragments$array$stateful$$get(this, 'content.length'), processedData);
this._pendingData = undefined;
},
/**
Normalize raw adapter data into an array; subclasses override this to
convert elements (e.g. into fragments).
@method _processData
@private
@param {Object} data
*/
_processData: function(data) {
// Simply ensure that the data is an actual array
return ember$lib$main$$default.makeArray(data);
},
/**
@method _createSnapshot
@private
*/
_createSnapshot: function() {
// Since elements are not models, a snapshot is simply a mapping of raw values
return this.toArray();
},
/**
@method adapterDidCommit
@private
*/
_adapterDidCommit: function(data) {
if (data) {
this.setupData(data);
} else {
// Fragment array has been persisted; use the current state as the original state
model$fragments$lib$fragments$array$stateful$$set(this, '_originalState', this.toArray());
}
},
/**
@method isDirty
@deprecated Use `hasDirtyAttributes` instead
*/
isDirty: model$fragments$lib$fragments$array$stateful$$computed('hasDirtyAttributes', function() {
ember$lib$main$$default.deprecate('The `isDirty` method of fragment arrays has been deprecated, please use `hasDirtyAttributes` instead');
return this.get('hasDirtyAttributes');
}),
/**
If this property is `true` the contents of the array do not match its
original state. The array has local changes that have not yet been saved by
the adapter. This includes additions, removals, and reordering of elements.
Example
```javascript
array.toArray(); // [ 'Tom', 'Yehuda' ]
array.get('isDirty'); // false
array.popObject(); // 'Yehuda'
array.get('isDirty'); // true
```
@property hasDirtyAttributes
@type {Boolean}
@readOnly
*/
hasDirtyAttributes: model$fragments$lib$fragments$array$stateful$$computed('[]', '_originalState', function() {
return ember$lib$main$$default.compare(this.toArray(), model$fragments$lib$fragments$array$stateful$$get(this, '_originalState')) !== 0;
}),
/**
@method rollback
@deprecated Use `rollbackAttributes()` instead
*/
rollback: function() {
ember$lib$main$$default.deprecate('Using array.rollback() has been deprecated. Use array.rollbackAttributes() to discard any unsaved changes to fragments in the array.');
this.rollbackAttributes();
},
/**
This method reverts local changes of the array's contents to its original
state.
Example
```javascript
array.toArray(); // [ 'Tom', 'Yehuda' ]
array.popObject(); // 'Yehuda'
array.toArray(); // [ 'Tom' ]
array.rollbackAttributes();
array.toArray(); // [ 'Tom', 'Yehuda' ]
```
@method rollbackAttributes
*/
rollbackAttributes: function() {
this.setObjects(model$fragments$lib$fragments$array$stateful$$get(this, '_originalState'));
},
/**
Method alias for `toArray`.
@method serialize
@return {Array}
*/
serialize: function() {
return this.toArray();
},
// Ember array-observer hook: fired after any mutation, used here to push the
// owner record into or out of the dirty state.
arrayContentDidChange: function() {
this._super.apply(this, arguments);
var record = model$fragments$lib$fragments$array$stateful$$get(this, 'owner');
var key = model$fragments$lib$fragments$array$stateful$$get(this, 'name');
// Any change to the size of the fragment array means a potential state change
if (model$fragments$lib$fragments$array$stateful$$get(this, 'hasDirtyAttributes')) {
model$fragments$lib$fragments$states$$fragmentDidDirty(record, key, this);
} else {
model$fragments$lib$fragments$states$$fragmentDidReset(record, key);
}
},
// Included in the array's `toString` output for easier debugging.
toStringExtension: function() {
return 'owner(' + model$fragments$lib$fragments$array$stateful$$get(this, 'owner.id') + ')';
}
});
var model$fragments$lib$fragments$array$stateful$$default = model$fragments$lib$fragments$array$stateful$$StatefulArray;
/**
@module ember-data.model-fragments
*/
// NOTE(review): relies on the global `Ember` for the `keys` fallback —
// presumably always present in supported builds; confirm.
var model$fragments$lib$fragments$ext$$keys = Object.keys || Ember.keys;
/**
@class Store
@namespace DS
*/
ember$data$lib$system$store$$default.reopen({
/**
Create a new fragment that does not yet have an owner record.
The properties passed to this method are set on the newly created
fragment.
To create a new instance of the `name` fragment:
```js
store.createFragment('name', {
first: "Alex",
last: "Routé"
});
```
@method createFragment
@param {String} type
@param {Object} properties a hash of properties to set on the
newly created fragment.
@return {DS.ModelFragment} fragment
*/
createFragment: function(modelName, props) {
var type = this.modelFor(modelName);
Ember.assert("The '" + type + "' model must be a subclass of DS.ModelFragment", model$fragments$lib$fragments$model$$default.detect(type));
var internalModel = new ember$data$lib$system$model$internal$model$$default(type, null, this, this.container);
// Re-wire the internal model to use the fragment state machine
internalModel.currentState = model$fragments$lib$fragments$states$$default.empty;
// The fragment has no owner record or attribute name until assigned.
internalModel._name = null;
internalModel._owner = null;
internalModel.loadedData();
var fragment = internalModel.getRecord();
if (props) {
fragment.setProperties(props);
}
return fragment;
}
});
/**
@class Model
@namespace DS
*/
ember$data$lib$system$model$$default.reopen({
/**
Returns an object, whose keys are changed properties, and value is
an [oldProp, newProp] array. When the model has fragments that have
changed, the property value is simply `true`.
Example
```javascript
App.Mascot = DS.Model.extend({
type: DS.attr('string'),
name: DS.hasOneFragment('name')
});
App.Name = DS.ModelFragment.extend({
first : DS.attr('string'),
last : DS.attr('string')
});
var person = store.createRecord('person');
person.changedAttributes(); // {}
person.get('name').set('first', 'Tomster');
person.set('type', 'Hamster');
person.changedAttributes(); // { name: true, type: [undefined, 'Hamster'] }
```
@method changedAttributes
@return {Object} an object, whose keys are changed properties,
and value is an [oldProp, newProp] array.
*/
changedAttributes: function() {
var diffData = this._super();
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(this);
model$fragments$lib$fragments$ext$$keys(internalModel._fragments).forEach(function(name) {
// An actual diff of the fragment or fragment array is outside the scope
// of this method, so just indicate that there is a change instead
// (a fragment key is present in `_attributes` only while it is dirty).
if (name in internalModel._attributes) {
diffData[name] = true;
}
}, this);
return diffData;
},
});
// Replace a method on an object with a new one that calls the original and
// then invokes a decorator function with the result; the decorator's return
// value becomes the method's return value.
function model$fragments$lib$fragments$ext$$decorateMethod(obj, name, fn) {
  var original = obj[name];
  obj[name] = function() {
    // Run the wrapped method first, then hand its result (plus the original
    // arguments object) to the decorator on the same `this`.
    var result = original.apply(this, arguments);
    return fn.call(this, result, arguments);
  };
}
var model$fragments$lib$fragments$ext$$InternalModelPrototype = ember$data$lib$system$model$internal$model$$default.prototype;
/**
Override parent method to snapshot fragment attributes before they are
passed to the `DS.Model#serialize`.
Note: this mutates the snapshot's `_attributes` hash in place, replacing
fragment values with their raw snapshots.
@method _createSnapshot
@private
*/
model$fragments$lib$fragments$ext$$decorateMethod(model$fragments$lib$fragments$ext$$InternalModelPrototype, 'createSnapshot', function createFragmentSnapshot(snapshot) {
var attrs = snapshot._attributes;
model$fragments$lib$fragments$ext$$keys(attrs).forEach(function(key) {
var attr = attrs[key];
// If the attribute has a `_createSnapshot` method, invoke it before the
// snapshot gets passed to the serializer
if (attr && typeof attr._createSnapshot === 'function') {
attrs[key] = attr._createSnapshot();
}
});
return snapshot;
});
/**
If the model `hasDirtyAttributes` this function will discard any unsaved
changes, recursively doing the same for all fragment properties.
Example
```javascript
record.get('name'); // 'Untitled Document'
record.set('name', 'Doc 1');
record.get('name'); // 'Doc 1'
record.rollbackAttributes();
record.get('name'); // 'Untitled Document'
```
@method rollbackAttributes
*/
model$fragments$lib$fragments$ext$$decorateMethod(model$fragments$lib$fragments$ext$$InternalModelPrototype, 'rollbackAttributes', function rollbackFragments() {
// After the record's own attributes roll back, cascade the rollback to
// every materialized fragment (entries may be null for unset attributes).
for (var key in this._fragments) {
if (this._fragments[key]) {
this._fragments[key].rollbackAttributes();
}
}
});
/**
If the adapter did not return a hash in response to a commit,
merge the changed attributes and relationships into the existing
saved data and notify all fragments of the commit.
@method adapterDidCommit
*/
model$fragments$lib$fragments$ext$$decorateMethod(model$fragments$lib$fragments$ext$$InternalModelPrototype, 'adapterDidCommit', function adapterDidCommit(returnValue, args) {
// `args[0]` is the payload passed to the original `adapterDidCommit`.
var attributes = (args[0] && args[0].attributes) || {};
var fragment;
// Notify fragments that the record was committed
// (deliberate assignment-in-condition: skips null/unset fragment entries).
for (var key in this._fragments) {
if (fragment = this._fragments[key]) {
fragment._adapterDidCommit(attributes[key]);
}
}
});
/**
@class JSONSerializer
@namespace DS
*/
ember$data$lib$serializers$json$serializer$$default.reopen({
/**
Enables fragment properties to have custom transforms based on the fragment
type, so that deserialization does not have to happen on the fly
@method transformFor
@private
*/
transformFor: function(attributeType) {
// Fragment meta types are prefixed '-mf-' (see `metaTypeFor`); everything
// else falls through to the stock transform lookup.
if (attributeType.indexOf('-mf-') === 0) {
return model$fragments$lib$fragments$ext$$getFragmentTransform(this.container, this.store, attributeType);
}
return this._super.apply(this, arguments);
}
});
// Retrieve or create a transform for the specific fragment type
// The meta type has the shape '-mf-<kind>[$<modelName>][$<typeKey>]':
// match[1] = transform kind ('fragment' | 'fragment-array' | 'array'),
// match[2] = fragment model name (optional),
// match[3] = polymorphic type key (optional).
// The specialized transform is registered once per meta type and then
// resolved from the container on subsequent calls.
function model$fragments$lib$fragments$ext$$getFragmentTransform(container, store, attributeType) {
var registry = container._registry || container;
var containerKey = 'transform:' + attributeType;
var match = attributeType.match(/^-mf-(fragment|fragment-array|array)(?:\$([^$]+))?(?:\$(.+))?$/);
var transformType = match[1];
var modelName = match[2];
var polymorphicTypeProp = match[3];
if (!registry.has(containerKey)) {
var transformClass = container.lookupFactory('transform:' + transformType);
registry.register(containerKey, transformClass.extend({
store: store,
modelName: modelName,
polymorphicTypeProp: polymorphicTypeProp
}));
}
return container.lookup(containerKey);
}
/**
@module ember-data.model-fragments
*/
// Module-local aliases (with legacy-Ember fallbacks where needed).
var model$fragments$lib$fragments$model$$get = ember$lib$main$$default.get;
var model$fragments$lib$fragments$model$$create = Object.create || ember$lib$main$$default.create;
var model$fragments$lib$fragments$model$$merge = ember$lib$main$$default.merge;
/**
The class that all nested object structures, or 'fragments', descend from.
Fragments are bound to a single 'owner' record (an instance of `DS.Model`)
and cannot change owners once set. They behave like models, but they have
no `save` method since their persistence is managed entirely through their
owner. Because of this, a fragment's state directly influences its owner's
state, e.g. when a record's fragment `hasDirtyAttributes`, its owner
`hasDirtyAttributes`.
Example:
```javascript
App.Person = DS.Model.extend({
name: DS.hasOneFragment('name')
});
App.Name = DS.ModelFragment.extend({
first : DS.attr('string'),
last : DS.attr('string')
});
```
With JSON response:
```json
{
"id": "1",
"name": {
"first": "Robert",
"last": "Jackson"
}
}
```
```javascript
var person = store.getById('person', '1');
var name = person.get('name');
person.get('hasDirtyAttributes'); // false
name.get('hasDirtyAttributes'); // false
name.get('first'); // 'Robert'
name.set('first', 'The Animal');
name.get('hasDirtyAttributes'); // true
person.get('hasDirtyAttributes'); // true
person.rollbackAttributes();
name.get('first'); // 'Robert'
person.get('hasDirtyAttributes'); // false
name.get('hasDirtyAttributes'); // false
```
@class ModelFragment
@namespace DS
@extends CoreModel
@uses Ember.Comparable
@uses Ember.Copyable
*/
var model$fragments$lib$fragments$model$$ModelFragment = ember$data$lib$system$model$$default.extend(ember$lib$main$$default.Comparable, ember$lib$main$$default.Copyable, {
/**
Compare two fragments by identity to allow `FragmentArray` to diff arrays.
@method compare
@param f1 {DS.ModelFragment} the first fragment to compare
@param f2 {DS.ModelFragment} the second fragment to compare
@return {Integer} the result of the comparison
*/
compare: function(f1, f2) {
return f1 === f2 ? 0 : 1;
},
/**
Create a new fragment that is a copy of the current fragment. Copied
fragments do not have the same owner record set, so they may be added
to other records safely.
@method copy
@return {DS.ModelFragment} the newly created fragment
*/
copy: function() {
var data = {};
// TODO: handle copying sub-fragments
// Canonical data first, then any dirty local attributes on top.
model$fragments$lib$fragments$model$$merge(data, this._data);
model$fragments$lib$fragments$model$$merge(data, this._attributes);
return this.store.createFragment(this.constructor.modelName, data);
},
/**
@method adapterDidCommit
*/
_adapterDidCommit: function(data) {
// Treat the committed data as canonical (empty hash when none returned).
model$fragments$lib$fragments$model$$internalModelFor(this).setupData({
attributes: data || {}
});
},
toStringExtension: function() {
return 'owner(' + model$fragments$lib$fragments$model$$get(model$fragments$lib$fragments$model$$internalModelFor(this)._owner, 'id') + ')';
}
});
// Resolve the concrete fragment model name for a (possibly polymorphic)
// fragment attribute.
// @param {String} declaredType the model name declared on the attribute
// @param {Object} options attribute options (`polymorphic`, `typeKey`)
// @param {Object} data raw fragment data from the adapter payload
// @return {String} the model name read from `data[options.typeKey || 'type']`,
//   falling back to `declaredType` when absent or non-polymorphic
function model$fragments$lib$fragments$model$$getActualFragmentType(declaredType, options, data) {
  // Non-polymorphic attributes (or missing data) always use the declared type
  if (!options.polymorphic || !data) {
    return declaredType;
  }
  var typeKey = options.typeKey || 'type';
  // Bug fix: `actualType` was returned without ever being read from the
  // payload, which threw a ReferenceError for every polymorphic fragment.
  var actualType = data[typeKey];
  return actualType || declaredType;
}
// Fetch a record's internal model, lazily attaching the `_fragments` cache
// the first time, since the internal model's constructor can no longer be
// overridden to set it up front.
function model$fragments$lib$fragments$model$$internalModelFor(record) {
  var internalModel = record._internalModel;
  if (!internalModel._fragments) {
    // Prototype-less object: a clean hash for fragment-by-key storage.
    internalModel._fragments = model$fragments$lib$fragments$model$$create(null);
  }
  return internalModel;
}
// Bind a fragment to its owner record and attribute name. A fragment may be
// bound only once (re-binding to the same owner is a no-op); use `copy()` to
// move data between records.
function model$fragments$lib$fragments$model$$setFragmentOwner(fragment, record, key) {
  var internalModel = model$fragments$lib$fragments$model$$internalModelFor(fragment);
  var currentOwner = internalModel._owner;
  ember$lib$main$$default.assert("Fragments can only belong to one owner, try copying instead", !currentOwner || currentOwner === record);
  internalModel._owner = record;
  internalModel._name = key;
  return fragment;
}
var model$fragments$lib$fragments$model$$default = model$fragments$lib$fragments$model$$ModelFragment;
// Map over any array-like object: use its native `map` when available,
// otherwise fall back to the Ember-derived polyfill.
function model$fragments$lib$util$map$$map(obj, callback, thisArg) {
  if (obj.map) {
    return obj.map(callback, thisArg);
  }
  return model$fragments$lib$util$map$$mapPolyfill.call(obj, callback, thisArg);
}
var model$fragments$lib$util$map$$default = model$fragments$lib$util$map$$map;
// https://github.com/emberjs/ember.js/blob/v1.11.0/packages/ember-metal/lib/array.js
// Spec-faithful Array#map polyfill: invoked with the array-like as `this`,
// an optional second argument serves as the callback's `this`.
function model$fragments$lib$util$map$$mapPolyfill(fun /*, thisp */) {
  if (this === void 0 || this === null || typeof fun !== "function") {
    throw new TypeError();
  }
  var target = Object(this);
  var length = target.length >>> 0;
  var result = new Array(length);
  var context = arguments[1];
  var index = 0;
  while (index < length) {
    // Skip holes in sparse arrays, mirroring native Array#map
    if (index in target) {
      result[index] = fun.call(context, target[index], index, target);
    }
    index++;
  }
  return result;
}
/**
@module ember-data.model-fragments
*/
var model$fragments$lib$fragments$array$fragment$$get = ember$lib$main$$default.get;
var model$fragments$lib$fragments$array$fragment$$computed = ember$lib$main$$default.computed;
/**
A state-aware array of fragments that is tied to an attribute of a `DS.Model`
instance. `FragmentArray` instances should not be created directly, instead
use the `DS.hasManyFragments` attribute.
@class FragmentArray
@namespace DS
@extends StatefulArray
*/
var model$fragments$lib$fragments$array$fragment$$FragmentArray = model$fragments$lib$fragments$array$stateful$$default.extend({
/**
The type of fragments the array contains
@property type
@private
@type {String}
*/
type: null,
// Attribute options (e.g. `polymorphic`, `typeKey`) forwarded from the
// `hasManyFragments` declaration.
options: null,
init: function() {
this._super();
this._isInitializing = false;
},
/**
Convert raw adapter data into fragment instances, reusing existing
fragments by index where possible.
@method _processData
@private
@param {Object} data
*/
_processData: function(data) {
var record = model$fragments$lib$fragments$array$fragment$$get(this, 'owner');
var store = model$fragments$lib$fragments$array$fragment$$get(record, 'store');
var declaredType = model$fragments$lib$fragments$array$fragment$$get(this, 'type');
var options = model$fragments$lib$fragments$array$fragment$$get(this, 'options');
var key = model$fragments$lib$fragments$array$fragment$$get(this, 'name');
var content = model$fragments$lib$fragments$array$fragment$$get(this, 'content');
// Mark the fragment array as initializing so that state changes are ignored
// until after all fragments' data is setup
this._isInitializing = true;
// Map data to existing fragments and create new ones where necessary
var processedData = model$fragments$lib$util$map$$default(ember$lib$main$$default.makeArray(data), function(data, i) {
var fragment = content[i];
// Create a new fragment from the data array if needed
if (!fragment) {
var actualType = model$fragments$lib$fragments$model$$getActualFragmentType(declaredType, options, data);
fragment = store.createFragment(actualType);
model$fragments$lib$fragments$model$$setFragmentOwner(fragment, record, key);
}
// Initialize the fragment with the data
model$fragments$lib$fragments$model$$internalModelFor(fragment).setupData({
attributes: data
});
return fragment;
});
this._isInitializing = false;
return processedData;
},
/**
@method _createSnapshot
@private
*/
_createSnapshot: function() {
// Snapshot each fragment
return this.map(function(fragment) {
return fragment._createSnapshot();
});
},
/**
@method adapterDidCommit
@private
*/
_adapterDidCommit: function(data) {
this._super(data);
// If the adapter update did not contain new data, just notify each fragment
// so it can transition to a clean state
if (!data) {
// Notify all records of commit
this.forEach(function(fragment) {
fragment._adapterDidCommit();
});
}
},
/**
If this property is `true`, either the contents of the array do not match
its original state, or one or more of the fragments in the array are dirty.
Example
```javascript
array.toArray(); // [ <Fragment:1>, <Fragment:2> ]
array.get('hasDirtyAttributes'); // false
array.get('firstObject').set('prop', 'newValue');
array.get('hasDirtyAttributes'); // true
```
@property hasDirtyAttributes
@type {Boolean}
@readOnly
*/
hasDirtyAttributes: model$fragments$lib$fragments$array$fragment$$computed('@each.hasDirtyAttributes', '_originalState', function() {
return this._super() || this.isAny('hasDirtyAttributes');
}),
/**
This method reverts local changes of the array's contents to its original
state, and calls `rollbackAttributes` on each fragment.
Example
```javascript
array.get('firstObject').get('hasDirtyAttributes'); // true
array.get('hasDirtyAttributes'); // true
array.rollbackAttributes();
array.get('firstObject').get('hasDirtyAttributes'); // false
array.get('hasDirtyAttributes'); // false
```
@method rollbackAttributes
*/
rollbackAttributes: function() {
this._super();
this.invoke('rollbackAttributes');
},
/**
Serializing a fragment array returns a new array containing the results of
calling `serialize` on each fragment in the array.
@method serialize
@return {Array}
*/
serialize: function() {
return this.invoke('serialize');
},
// All array mutation funnels through here; validates fragment type and
// claims ownership of any unowned fragments before splicing them in.
replaceContent: function(idx, amt, fragments) {
var array = this;
var record = model$fragments$lib$fragments$array$fragment$$get(this, 'owner');
var key = model$fragments$lib$fragments$array$fragment$$get(this, 'name');
// Since all array manipulation methods end up using this method, ensure
// ensure that fragments are the correct type and have an owner and name
if (fragments) {
fragments.forEach(function(fragment) {
var owner = model$fragments$lib$fragments$model$$internalModelFor(fragment)._owner;
ember$lib$main$$default.assert("Fragments can only belong to one owner, try copying instead", !owner || owner === record);
ember$lib$main$$default.assert("You can only add '" + model$fragments$lib$fragments$array$fragment$$get(array, 'type') + "' fragments to this property", (function (type) {
if (fragment instanceof type) {
return true;
} else if (ember$lib$main$$default.MODEL_FACTORY_INJECTIONS) {
return fragment instanceof type.superclass;
}
return false;
})(model$fragments$lib$fragments$array$fragment$$get(record, 'store').modelFor(model$fragments$lib$fragments$array$fragment$$get(array, 'type'))));
if (!owner) {
model$fragments$lib$fragments$model$$setFragmentOwner(fragment, record, key);
}
});
}
return model$fragments$lib$fragments$array$fragment$$get(this, 'content').replace(idx, amt, fragments);
},
/**
Adds an existing fragment to the end of the fragment array. Alias for
`addObject`.
@method addFragment
@param {DS.ModelFragment} fragment
@return {DS.ModelFragment} the newly added fragment
*/
addFragment: function(fragment) {
return this.addObject(fragment);
},
/**
Removes the given fragment from the array. Alias for `removeObject`.
@method removeFragment
@param {DS.ModelFragment} fragment
@return {DS.ModelFragment} the removed fragment
*/
removeFragment: function(fragment) {
return this.removeObject(fragment);
},
/**
Creates a new fragment of the fragment array's type and adds it to the end
of the fragment array
@method createFragment
@param {DS.ModelFragment} fragment
@return {DS.ModelFragment} the newly added fragment
*/
createFragment: function(props) {
var record = model$fragments$lib$fragments$array$fragment$$get(this, 'owner');
var store = model$fragments$lib$fragments$array$fragment$$get(record, 'store');
var type = model$fragments$lib$fragments$array$fragment$$get(this, 'type');
var fragment = store.createFragment(type, props);
return this.pushObject(fragment);
}
});
var model$fragments$lib$fragments$array$fragment$$default = model$fragments$lib$fragments$array$fragment$$FragmentArray;
var model$fragments$lib$util$ember$new$computed$$Ember = window.Ember;
var model$fragments$lib$util$ember$new$computed$$computed = model$fragments$lib$util$ember$new$computed$$Ember.computed;
// Feature-detect whether this Ember version accepts the `{ get, set }`
// object form of `Ember.computed`; older versions throw, in which case the
// polyfill below converts the object form to the legacy function form.
var model$fragments$lib$util$ember$new$computed$$supportsSetterGetter;
try {
model$fragments$lib$util$ember$new$computed$$Ember.computed({
set: function() { },
get: function() { }
});
model$fragments$lib$util$ember$new$computed$$supportsSetterGetter = true;
} catch(e) {
model$fragments$lib$util$ember$new$computed$$supportsSetterGetter = false;
}
// Drop-in replacement for `Ember.computed` that accepts the modern
// `{ get, set }` config object on Ember versions that predate it, by
// converting the config into the legacy arity-dispatched function form.
var model$fragments$lib$util$ember$new$computed$$default = function() {
var polyfillArguments = [];
// The last argument is either a function (legacy form) or a config object.
var config = arguments[arguments.length - 1];
if (typeof config === 'function' || model$fragments$lib$util$ember$new$computed$$supportsSetterGetter) {
return model$fragments$lib$util$ember$new$computed$$computed.apply(this, arguments);
}
// Copy the dependent-key arguments (everything but the config).
for (var i = 0, l = arguments.length - 1; i < l; i++) {
polyfillArguments.push(arguments[i]);
}
var func;
if (config.set) {
// Legacy computed: setter is distinguished by being called with 2 args.
func = function(key, value) {
if (arguments.length > 1) {
return config.set.call(this, key, value);
} else {
return config.get.call(this, key);
}
};
} else {
func = function(key) {
return config.get.call(this, key);
};
}
polyfillArguments.push(func);
return model$fragments$lib$util$ember$new$computed$$computed.apply(this, polyfillArguments);
};
/**
@module ember-data.model-fragments
*/
var model$fragments$lib$fragments$attributes$$get = ember$lib$main$$default.get;
// Create a unique type string for the combination of fragment property type,
// fragment model name, and polymorphic type key
function model$fragments$lib$fragments$attributes$$metaTypeFor(type, modelName, options) {
var metaType = '-mf-' + type;
if (modelName) {
metaType += '$' + modelName;
}
if (options && options.polymorphic) {
metaType += '$' + (options.typeKey || 'type');
}
return metaType;
}
/**
`DS.hasOneFragment` defines an attribute on a `DS.Model` or `DS.ModelFragment`
instance. Much like `DS.belongsTo`, it creates a property that returns a
single fragment of the given type.
`DS.hasOneFragment` takes an optional hash as a second parameter, currently
supported options are:
- `defaultValue`: An object literal or a function to be called to set the
attribute to a default value if none is supplied. Values are deep copied
before being used. Note that default values will be passed through the
fragment's serializer when creating the fragment.
Example
```javascript
App.Person = DS.Model.extend({
name: DS.hasOneFragment('name', { defaultValue: {} })
});
App.Name = DS.ModelFragment.extend({
first : DS.attr('string'),
last : DS.attr('string')
});
```
@namespace
@method hasOneFragment
@for DS
@param {String} type the fragment type
@param {Object} options a hash of options
@return {Attribute}
*/
function model$fragments$lib$fragments$attributes$$hasOneFragment(declaredModelName, options) {
options = options || {};
var metaType = model$fragments$lib$fragments$attributes$$metaTypeFor('fragment', declaredModelName, options);
// Lazily materialize the fragment for `key`, converting raw data into a
// fragment instance on first access.
function setupFragment(store, record, key) {
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(record);
var data = internalModel._data[key] || model$fragments$lib$fragments$attributes$$getDefaultValue(internalModel, options, 'object');
var fragment = internalModel._fragments[key];
var actualTypeName = model$fragments$lib$fragments$model$$getActualFragmentType(declaredModelName, options, data);
// Regardless of whether being called as a setter or getter, the fragment
// may not be initialized yet, in which case the data will contain a
// raw response or a stashed away fragment
// If we already have a processed fragment in _data and our current fragmet is
// null simply reuse the one from data. We can be in this state after a rollback
// for example
if (!fragment && model$fragments$lib$fragments$attributes$$isInstanceOfType(store.modelFor(actualTypeName), data)) {
fragment = data;
// Else initialize the fragment
} else if (data && data !== fragment) {
fragment || (fragment = model$fragments$lib$fragments$model$$setFragmentOwner(store.createFragment(actualTypeName), record, key));
// Make sure to first cache the fragment before calling setupData, so if setupData causes this CP to be accessed
// again we have it cached already
internalModel._data[key] = fragment;
model$fragments$lib$fragments$model$$internalModelFor(fragment).setupData({
attributes: data
});
} else {
// Handle the adapter setting the fragment to null
fragment = data;
}
return fragment;
}
// Setter half of the computed property: bind the incoming fragment to this
// record and dirty/reset the attribute as appropriate.
function setFragmentValue(record, key, fragment, value) {
ember$lib$main$$default.assert("You can only assign a '" + declaredModelName + "' fragment to this property", value === null || model$fragments$lib$fragments$attributes$$isInstanceOfType(record.store.modelFor(declaredModelName), value));
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(record);
fragment = value ? model$fragments$lib$fragments$model$$setFragmentOwner(value, record, key) : null;
if (internalModel._data[key] !== fragment) {
model$fragments$lib$fragments$states$$fragmentDidDirty(record, key, fragment);
} else {
model$fragments$lib$fragments$states$$fragmentDidReset(record, key);
}
return fragment;
}
return model$fragments$lib$fragments$attributes$$fragmentProperty(metaType, options, setupFragment, setFragmentValue);
}
// Check whether a fragment is an instance of the given type, respecting model
// factory injections
function model$fragments$lib$fragments$attributes$$isInstanceOfType(type, fragment) {
  if (fragment instanceof type) {
    return true;
  }
  // With factory injections enabled, the registered class subclasses the
  // authored one, so fall back to checking against the superclass.
  return ember$lib$main$$default.MODEL_FACTORY_INJECTIONS ? fragment instanceof type.superclass : false;
}
/**
`DS.hasManyFragments` defines an attribute on a `DS.Model` or
`DS.ModelFragment` instance. Much like `DS.hasMany`, it creates a property
that returns an array of fragments of the given type. The array is aware of
its original state and so has a `hasDirtyAttributes` property and a `rollback` method.
If a fragment type is not given, values are not converted to fragments, but
passed straight through.
`DS.hasManyFragments` takes an optional hash as a second parameter, currently
supported options are:
- `defaultValue`: An array literal or a function to be called to set the
attribute to a default value if none is supplied. Values are deep copied
before being used. Note that default values will be passed through the
fragment's serializer when creating the fragment.
Example
```javascript
App.Person = DS.Model.extend({
addresses: DS.hasManyFragments('address', { defaultValue: [] })
});
App.Address = DS.ModelFragment.extend({
street : DS.attr('string'),
city : DS.attr('string'),
region : DS.attr('string'),
country : DS.attr('string')
});
```
@namespace
@method hasManyFragments
@for DS
@param {String} type the fragment type (optional)
@param {Object} options a hash of options
@return {Attribute}
*/
function model$fragments$lib$fragments$attributes$$hasManyFragments(modelName, options) {
options || (options = {});
// If a modelName is not given, it implies an array of primitives
// (handled by a plain StatefulArray instead of a FragmentArray).
if (ember$lib$main$$default.typeOf(modelName) !== 'string') {
return model$fragments$lib$fragments$attributes$$arrayProperty(options);
}
var metaType = model$fragments$lib$fragments$attributes$$metaTypeFor('fragment-array', modelName, options);
return model$fragments$lib$fragments$attributes$$fragmentArrayProperty(metaType, options, function createFragmentArray(record, key) {
return model$fragments$lib$fragments$array$fragment$$default.create({
type: modelName,
options: options,
name: key,
owner: record
});
});
}
// Attribute builder for untyped (primitive) arrays: values pass straight
// through into a plain StatefulArray with no fragment conversion.
function model$fragments$lib$fragments$attributes$$arrayProperty(options) {
  options = options || {};
  // No model name, so the meta type is simply '-mf-array'.
  var metaType = model$fragments$lib$fragments$attributes$$metaTypeFor('array');
  return model$fragments$lib$fragments$attributes$$fragmentArrayProperty(metaType, options, function createStatefulArray(record, key) {
    return model$fragments$lib$fragments$array$stateful$$default.create({
      options: options,
      name: key,
      owner: record
    });
  });
}
function model$fragments$lib$fragments$attributes$$fragmentProperty(type, options, setupFragment, setFragmentValue) {
options = options || {};
var meta = {
type: type,
isAttribute: true,
isFragment: true,
options: options
};
return model$fragments$lib$util$ember$new$computed$$default({
get: function(key) {
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(this);
var fragment = setupFragment(this.store, this, key);
return internalModel._fragments[key] = fragment;
},
set: function(key, value) {
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(this);
var fragment = setupFragment(this.store, this, key);
fragment = setFragmentValue(this, key, fragment, value);
return internalModel._fragments[key] = fragment;
}
}).meta(meta);
}
function model$fragments$lib$fragments$attributes$$fragmentArrayProperty(metaType, options, createArray) {
function setupFragmentArray(store, record, key) {
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(record);
var data = internalModel._data[key] || model$fragments$lib$fragments$attributes$$getDefaultValue(internalModel, options, 'array');
var fragments = internalModel._fragments[key] || null;
// If we already have a processed fragment in _data and our current fragmet is
// null simply reuse the one from data. We can be in this state after a rollback
// for example
if (data instanceof model$fragments$lib$fragments$array$stateful$$default && !fragments) {
fragments = data;
// Create a fragment array and initialize with data
} else if (data && data !== fragments) {
fragments || (fragments = createArray(record, key));
internalModel._data[key] = fragments;
fragments.setupData(data);
} else {
// Handle the adapter setting the fragment array to null
fragments = data;
}
return fragments;
}
function setFragmentValue(record, key, fragments, value) {
var internalModel = model$fragments$lib$fragments$model$$internalModelFor(record);
if (ember$lib$main$$default.isArray(value)) {
fragments || (fragments = createArray(record, key));
fragments.setObjects(value);
} else if (value === null) {
fragments = null;
} else {
ember$lib$main$$default.assert("A fragment array property can only be assigned an array or null");
}
if (internalModel._data[key] !== fragments || model$fragments$lib$fragments$attributes$$get(fragments, 'hasDirtyAttributes')) {
model$fragments$lib$fragments$states$$fragmentDidDirty(record, key, fragments);
} else {
model$fragments$lib$fragments$states$$fragmentDidReset(record, key);
}
return fragments;
}
return model$fragments$lib$fragments$attributes$$fragmentProperty(metaType, options, setupFragmentArray, setFragmentValue);
}
// Like `DS.belongsTo`, when used within a model fragment is a reference
// to the owner record
/**
`DS.fragmentOwner` defines a read-only attribute on a `DS.ModelFragment`
instance. The attribute returns a reference to the fragment's owner
record.
Example
```javascript
App.Person = DS.Model.extend({
name: DS.hasOneFragment('name')
});
App.Name = DS.ModelFragment.extend({
first : DS.attr('string'),
last : DS.attr('string'),
person : DS.fragmentOwner()
});
```
@namespace
@method fragmentOwner
@for DS
@return {Attribute}
*/
function model$fragments$lib$fragments$attributes$$fragmentOwner() {
// TODO: add a warning when this is used on a non-fragment
return ember$lib$main$$default.computed(function() {
return model$fragments$lib$fragments$model$$internalModelFor(this)._owner;
}).readOnly();
}
// The default value of a fragment is either an array or an object,
// which should automatically get deep copied
function model$fragments$lib$fragments$attributes$$getDefaultValue(record, options, type) {
var value;
if (typeof options.defaultValue === "function") {
value = options.defaultValue();
} else if (options.defaultValue) {
value = options.defaultValue;
} else {
return null;
}
ember$lib$main$$default.assert("The fragment's default value must be an " + type, ember$lib$main$$default.typeOf(value) == type);
// Create a deep copy of the resulting value to avoid shared reference errors
return ember$lib$main$$default.copy(value, true);
}
/**
@module ember-data.model-fragments
*/
var model$fragments$lib$fragments$transforms$array$$makeArray = ember$lib$main$$default.makeArray;
/**
Transform for array-like attributes fragment attribute with no model
@class ArrayTransform
@namespace DS
@extends DS.Transform
*/
var model$fragments$lib$fragments$transforms$array$$ArrayTransform = ember$data$lib$system$transform$$default.extend({
deserialize: function deserializeArray(data) {
return data == null ? null : model$fragments$lib$fragments$transforms$array$$makeArray(data);
},
serialize: function serializeArray(array) {
return array && array.toArray ? array.toArray() : array;
}
});
var model$fragments$lib$fragments$transforms$array$$default = model$fragments$lib$fragments$transforms$array$$ArrayTransform;
/**
@module ember-data.model-fragments
*/
var model$fragments$lib$fragments$transforms$fragment$$get = ember$lib$main$$default.get;
/**
Transform for `DS.hasOneFragment` fragment attribute which delegates work to
the fragment type's serializer
@class FragmentTransform
@namespace DS
@extends DS.Transform
*/
var model$fragments$lib$fragments$transforms$fragment$$FragmentTransform = ember$data$lib$system$transform$$default.extend({
store: null,
modelName: null,
polymorphicTypeProp: null,
deserialize: function deserializeFragment(data) {
if (data == null) {
return null;
}
return this.deserializeSingle(data);
},
serialize: function serializeFragment(snapshot) {
if (!snapshot) {
return null;
}
var store = this.store;
var serializer = store.serializerFor(snapshot.modelName);
return serializer.serialize(snapshot);
},
modelNameFor: function modelNameFor(data) {
var modelName = model$fragments$lib$fragments$transforms$fragment$$get(this, 'modelName');
var polymorphicTypeProp = model$fragments$lib$fragments$transforms$fragment$$get(this, 'polymorphicTypeProp');
if (data && polymorphicTypeProp && data[polymorphicTypeProp]) {
modelName = data[polymorphicTypeProp];
}
return modelName;
},
deserializeSingle: function deserializeSingle(data) {
var store = this.store;
var modelName = this.modelNameFor(data);
var serializer = store.serializerFor(modelName);
ember$lib$main$$default.assert("The `JSONAPISerializer` is not suitable for model fragments, please use `JSONSerializer`", !(serializer instanceof ember$data$lib$serializers$json$api$serializer$$default));
var isNewSerializerAPI = model$fragments$lib$fragments$transforms$fragment$$get(serializer, 'isNewSerializerAPI');
var typeClass = store.modelFor(modelName);
var serialized = serializer.normalize(typeClass, data);
// The new serializer API returns a full JSON API document, but we only need
// the attributes hash
if (isNewSerializerAPI) {
return model$fragments$lib$fragments$transforms$fragment$$get(serialized, 'data.attributes');
} else {
return serialized;
}
}
});
var model$fragments$lib$fragments$transforms$fragment$$default = model$fragments$lib$fragments$transforms$fragment$$FragmentTransform;
/**
@module ember-data.model-fragments
*/
/**
Transform for `DS.hasManyFragments` fragment attribute which delegates work to
the fragment type's serializer
@class FragmentArrayTransform
@namespace DS
@extends DS.Transform
*/
var model$fragments$lib$fragments$transforms$fragment$array$$FragmentArrayTransform = model$fragments$lib$fragments$transforms$fragment$$default.extend({
deserialize: function deserializeFragmentArray(data) {
if (data == null) {
return null;
}
return model$fragments$lib$util$map$$default(data, function(datum) {
return this.deserializeSingle(datum);
}, this);
},
serialize: function serializeFragmentArray(snapshots) {
if (!snapshots) {
return null;
}
var store = this.store;
return model$fragments$lib$util$map$$default(snapshots, function(snapshot) {
var serializer = store.serializerFor(snapshot.modelName);
return serializer.serialize(snapshot);
});
}
});
var model$fragments$lib$fragments$transforms$fragment$array$$default = model$fragments$lib$fragments$transforms$fragment$array$$FragmentArrayTransform;
var model$fragments$lib$initializers$$initializers = [
{
name: "fragmentTransform",
before: "store",
initialize: function(container, application) {
application.register('transform:fragment', model$fragments$lib$fragments$transforms$fragment$$default);
application.register('transform:fragment-array', model$fragments$lib$fragments$transforms$fragment$array$$default);
application.register('transform:array', model$fragments$lib$fragments$transforms$array$$default);
}
}
];
var model$fragments$lib$initializers$$default = model$fragments$lib$initializers$$initializers;
function model$fragments$lib$main$$exportMethods(scope) {
scope.ModelFragment = model$fragments$lib$fragments$model$$default;
scope.FragmentArray = model$fragments$lib$fragments$array$fragment$$default;
scope.FragmentTransform = model$fragments$lib$fragments$transforms$fragment$$default;
scope.FragmentArrayTransform = model$fragments$lib$fragments$transforms$fragment$array$$default;
scope.ArrayTransform = model$fragments$lib$fragments$transforms$array$$default;
scope.hasOneFragment = model$fragments$lib$fragments$attributes$$hasOneFragment;
scope.hasManyFragments = model$fragments$lib$fragments$attributes$$hasManyFragments;
scope.fragmentOwner = model$fragments$lib$fragments$attributes$$fragmentOwner;
}
/**
Ember Data Model Fragments
@module ember-data.model-fragments
@main ember-data.model-fragments
*/
var model$fragments$lib$main$$MF = ember$lib$main$$default.Namespace.create({
VERSION: '0.3.3+8b1fcdd5'
});
model$fragments$lib$main$$exportMethods(model$fragments$lib$main$$MF);
// This will be removed at some point in favor of the `MF` namespace
model$fragments$lib$main$$exportMethods(ember$data$lib$main$$default);
ember$lib$main$$default.onLoad('Ember.Application', function(Application) {
model$fragments$lib$initializers$$default.forEach(Application.initializer, Application);
});
if (ember$lib$main$$default.libraries) {
ember$lib$main$$default.libraries.register('Model Fragments', model$fragments$lib$main$$MF.VERSION);
}
var model$fragments$lib$main$$default = model$fragments$lib$main$$MF;
}).call(this);
//# sourceMappingURL=ember-data.model-fragments.map<|fim▁end|>
|
var actualType = data[typeKey];
|
<|file_name|>sidebar.js<|end_file_name|><|fim▁begin|>/**
* 首页
*/
pageData = window.pageData?window.pageData:[];
var vm = avalon.define({
$id : "sidebar",
test: "tst",
//domainBuyList1 : [{title:"test"},{title:"test2"}],
//domainSoldList1 : [{title:"test"},{title:"test2"}],//买标信息一览(最新11条,首页只表示最新的)
datas : {
domainArticleList : [],//sidebar信息一览(最新5条)12.31
timeStamp: 0,
tmp : {
currentClientId : null,
newDate : new Date()
},
userinfo : {
id : ""
}
},
});
// 初始化动作
$(function(){
avalon.scan();
//TODO
});
//sidebar信息一览(最新5条)12.31
var interval_GbjArticle_status_check = function() {
//var v = window.location.href.split("?")[1].substring(3,35);
//alert(u);
var count = 5;//首页最多显示11条
//var id= v;
$.post(
"polling/GbjArticle.json",
<|fim▁hole|> },
function(res) {
if (res.type == "success") {
//vm.datas.domainArticleList.clear();
vm.datas.domainArticleList.pushArray(res.data.GbjArticle);
timeout_GbjArticle = setTimeout(interval_GbjArticle_status_check, 30000); //30秒自动刷新一次
}
}
);
}
interval_GbjArticle_status_check();<|fim▁end|>
|
{
count : count //参数1,检索的limit条数
// id:id
|
<|file_name|>mm_proposal_wo_kivi.py<|end_file_name|><|fim▁begin|># python3
"""
Mastermind without kivy - by Luis
merciless edited by hans
"""
import random
import re
class G():
valid_chars = '123456'
secret_len = 5
solved = '+' * secret_len<|fim▁hole|>def main():
secret = answer_generator()
print('Enter your guess of {} of these symbols: ({})'
.format(G.secret_len, G.valid_chars))
while True:
user_seq = user_guess()
output = handle_game(secret, user_seq)
result_msg = ('{} -> {}')
print(result_msg.format(user_seq, output))
if output == G.solved:
break
print('You have found the answer! Goodbye!')
def handle_game(answer, guess):
answer = list(answer) # no need to str() or to assign a new name
guess = list(guess)
output = ''
for i, ch in enumerate(guess):
if ch == answer[i]:
# eliminate hits from both lists, but leave position untouched
guess[i] = '°' # any char which is not in valid_chars
answer[i] = '^'
output += '+'
for ch in guess:
if ch in answer:
# remove hit from answer, position is no longer important
answer.remove(ch)
output += '-'
return output
def user_guess():
while True:
response = input() # no argument needed, default is ''
if G.valid_input.match(response):
return response
print("wrong input...")
def answer_generator(): # Creates random sequence of n characters
seq = ''
for _ in range(G.secret_len): # '_': we dont care for the value
seq += random.choice(G.valid_chars) # valid_chars string is iterable
return seq
if __name__ == '__main__':
main()<|fim▁end|>
|
regex_str = "^[{0}]{{{1},{1}}}$".format(valid_chars, secret_len)
valid_input = re.compile(regex_str) # regular expression for user input
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import DndStatus from 'ringcentral-integration/modules/Presence/dndStatus';
import i18n from './i18n';<|fim▁hole|> currentLocale,
) {
if (dndStatus === DndStatus.doNotAcceptAnyCalls) {
return i18n.getString(dndStatus, currentLocale);
}
return i18n.getString(presenceStatus, currentLocale);
}<|fim▁end|>
|
export function getPresenceStatusName(
presenceStatus,
dndStatus,
|
<|file_name|>qa_test.py<|end_file_name|><|fim▁begin|>"""
Copyright 2013 OpERA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
#!/usr/bin/python
## @package algorithm
# ::TODO:: Discover how to include patches externally
import sys
import os
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))
sys.path.insert(0, path)
import unittest
import random
# Modules tested
from feedbackAlgorithm import FeedbackAlgorithm, ExponentialTimeFeedback, KunstTimeFeedback
# Other modules needed
from device import radioDevice
from abstractAlgorithm import AbstractAlgorithm
class QaAlgorithm(unittest.TestCase):
"""
Test algorithm module.
"""
def test_feedback_001(self):
"""
Test the feedback algorithm.
"""
mi = 1,
ma = 256
base = 3
obj = ExponentialTimeFeedback(min_time=mi,
max_time=ma,
base=base
)
# Estado inicial
# Initial state.
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
# 3 ^ 0 == 1 (wait is 1)
self.assertEqual(True, obj.feedback())
# Testa se voltou direito
# Test if got back correctly.
self.assertEqual(False, obj.feedback())
<|fim▁hole|> # We increase the sensing time 3^1 = 3.
obj.increase_time()
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 2
obj.wait() # wait = 3
self.assertEqual(True, obj.feedback()) # wait gets back to 0 # volta wait para 0
self.assertEqual(False, obj.feedback())
obj.decrease_time() # reset time 3^0 = 1 # reseta tempo 3^0 = 1
obj.wait() # wait = 1
self.assertEqual(True, obj.feedback()) # wait gets back to 0 # volta wait para 0
def test_feedback_002(self):
"""
Test the feedback algorithm
"""
obj = KunstTimeFeedback()
# Estado inicial
# Initial state.
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
# 2 ^ 0 == 1
# wait = 0
self.assertEqual(True, obj.feedback())
# Aumentamos o tempo de sensoriamento 2^1 = 2
# We increase the sensing time 2^1 = 2
obj.increase_time()
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 1
self.assertEqual(False, obj.feedback())
obj.wait() # wait = 2
self.assertEqual(True, obj.feedback()) # wait gets back to 0 # volta wait para 0
self.assertEqual(False, obj.feedback()) # wait gets back to 0 volta wait para 0
obj.wait() # wait = 1
obj.wait() # wait = 2
obj.wait() # wait = 3
obj.wait() # wait = 4
obj.increase_time() # 2^2 = 4
self.assertEqual(True, obj.feedback()) # wait gets back to 0 # volta wait para 0
self.assertEqual(False, obj.feedback()) # wait gets back to 0 # volta wait para 0
obj.decrease_time() # Should be 2^1 = 2
obj.wait()
obj.wait()
self.assertEqual(True, obj.feedback()) # wait gets back to 0 # volta wait para 0
self.assertEqual(False, obj.feedback()) # wait gets back to 0 # volta wait para 0
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
# Aumentamos o tempo de sensoriamento 3^1 = 3
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin, custom_derive)]
#![plugin(rocket_codegen)]
#[macro_use] extern crate contacts;
#[macro_use] extern crate error_chain;
#[macro_use] extern crate lazy_static;
#[macro_use] extern crate serde_derive;
extern crate postgres;
extern crate r2d2;
extern crate r2d2_postgres;
extern crate rocket;
extern crate rocket_contrib;
extern crate serde;
extern crate serde_json;
extern crate uuid;
use std::collections::HashMap;
use r2d2::{Pool, PooledConnection, GetTimeout};
use r2d2_postgres::PostgresConnectionManager;
use rocket::Outcome::{Success, Failure, Forward};
use rocket::Request;
use rocket::http::{Cookie, Cookies, Status};
use rocket::request::{Form, FromRequest, Outcome};
use rocket::response::Redirect;
use rocket_contrib::{Template, UUID};
use uuid::Uuid;
use contacts::config;
use contacts::errors::*;
use contacts::models::{Person, Session, Contact, as_brand};
lazy_static! {
pub static ref DB_POOL: Pool<PostgresConnectionManager> = contacts::create_db_pool().unwrap();
}
pub struct DB(PooledConnection<PostgresConnectionManager>);
impl DB {
pub fn conn(&self) -> &postgres::Connection {
&*self.0
}
}
impl<'a, 'r> FromRequest<'a, 'r> for DB {
type Error = GetTimeout;
fn from_request(_: &'a Request<'r>) -> Outcome<Self, Self::Error> {
match DB_POOL.get() {
Ok(conn) => Success(DB(conn)),
Err(e) => Failure((Status::ServiceUnavailable, e)),
}
}
}
#[derive(Debug, FromForm)]
struct Email {
email: String,
}
#[post("/login", data="<form>")]
fn login(form: Form<Email>, cookies: &Cookies, db: DB) -> Result<Template> {
let &Email { ref email } = form.get();
// if we start an auth flow, kill whatever session may exist
cookies.remove("session");
let res = find!(db,
"SELECT * FROM PEOPLE WHERE people.email = $1",
&email
).map(Person::from_row);
let (me, new) = match res {
Some(me) => (me, false),
None => {
let me = find!(db,
"INSERT INTO PEOPLE (email) VALUES ($1) RETURNING *",
&email)
.map(Person::from_row)
.ok_or("could not create person")?;
(me, true)
}
};
let login_key: Uuid = find!(db,
"INSERT INTO sessions (account) VALUES ($1) RETURNING login_key",
&me.id)
.ok_or("could not insert session")?
.get(0);
contacts::send_login(email, &login_key, new)?;
let mut context = HashMap::new();
context.insert("email", email);
Ok(Template::render("login", &context))
}
#[derive(Debug, FromForm)]
struct LoginKey {
key: UUID,
}
#[get("/login?<form>")]
fn finish_login(form: LoginKey, cookies: &Cookies, db: DB) -> Result<Redirect> {
let LoginKey { ref key } = form;
// if we are in auth flow, kill whatever session may exist
cookies.remove("session");
let session = find!(db,
"SELECT * FROM sessions WHERE login_key = $1",
&key.into_inner())
.map(Session::from_row)
.ok_or("missing session")?;
if session.session_id.is_some() {
bail!("already got this session whoops");
}
let id: Uuid = find!(db,
" UPDATE sessions
SET session_id = uuid_generate_v4()
WHERE login_key = $1
RETURNING session_id",
&key.into_inner())
.ok_or("failed to set session_id")?
.get(0);
let cookie = Cookie::build("session", id.to_string())
// .domain(blah)
.path("/")
// .secure(true)
.http_only(true)
.finish();
cookies.add(cookie);
Ok(Redirect::to("/"))
}
#[derive(Debug)]
struct Me(Person);
fn get_me(cookies: &Cookies) -> Result<Option<Me>> {
let cookie = match cookies.find("session") {
Some(c) => c,
None => {
return Ok(None)
}
};
let claimed_id: Uuid = cookie.value().parse()
.chain_err(|| "Invalid session cookie")?;
let db = DB(DB_POOL.get()?);
let me = find!(db,
"SELECT p.*
FROM people AS p,
sessions AS s
WHERE s.account = p.id
AND s.session_id = $1",
&claimed_id)
.map(|row| Me(Person::from_row(row)));
Ok(me)
}
impl<'a, 'r> FromRequest<'a, 'r> for Me {
type Error = Error;
fn from_request(request: &'a Request<'r>) -> Outcome<Me, Self::Error> {
match get_me(request.cookies()) {
Ok(Some(me)) => Success(me),
Ok(None) => Forward(()),
Err(e) => Failure((Status::ServiceUnavailable, e)),
}
}
}
#[derive(Debug, FromForm)]
struct NewContactForm {
name: String,
info: String,
}
#[post("/contacts", data="<form>")]
fn new_contact(form: Form<NewContactForm>, me: Me, db: DB) -> Result<Redirect> {
let &NewContactForm { ref name, ref info } = form.get();
write!(db, "INSERT INTO contacts (account, name, info)
VALUES ($1, $2, $3)",
&me.0.id, &name, &info);
Ok(Redirect::to("/"))
}
#[derive(Debug, FromForm)]
struct DeleteContactForm {
id: UUID,
next: Option<String>,
}
#[get("/contacts/delete?<form>")]
fn delete_contact(form: DeleteContactForm, me: Me, db: DB) -> Result<Redirect> {
let DeleteContactForm { id, next } = form;
write!(db, "DELETE FROM contacts WHERE id = $1 AND account = $2",
&id.into_inner(), &me.0.id);
Ok(Redirect::to(&next.unwrap_or("/".into())))
}
#[derive(Debug, FromForm)]
#[allow(non_snake_case)]
pub struct StripeSubscribe {
stripeToken: String,
stripeTokenType: String,
stripeEmail: String,
stripeBillingName: String,
stripeBillingAddressLine1: String,
stripeBillingAddressZip: String,
stripeBillingAddressState: String,
stripeBillingAddressCity: String,
stripeBillingAddressCountry: String,
stripeBillingAddressCountryCode: String,
stripeShippingName: String,
stripeShippingAddressLine1: String,
stripeShippingAddressZip: String,
stripeShippingAddressState: String,
stripeShippingAddressCity: String,
stripeShippingAddressCountry: String,<|fim▁hole|>fn subscribe(form: Form<StripeSubscribe>, me: Me, db: DB) -> Result<Redirect> {
let data = form.get();
write!(db, "UPDATE people
SET address = ($2, $3, $4, $5, $6, $7)
WHERE id = $1",
&me.0.id,
&data.stripeShippingName,
&data.stripeShippingAddressLine1,
&data.stripeShippingAddressZip,
&data.stripeShippingAddressCity,
&data.stripeShippingAddressState,
&data.stripeShippingAddressCountry);
let subscriber = contacts::create_customer(&data.stripeToken, &me.0)?;
write!(db, "UPDATE people SET customer = $1 WHERE id = $2",
&subscriber.id, &me.0.id);
let ref source = subscriber.sources.data[0];
write!(db, "INSERT INTO cards (id, brand, country, customer, last4, name)
VALUES ($1, $2, $3, $4, $5, $6)",
&source.id,
&as_brand(&source.brand),
&source.country,
&source.customer,
&source.last4,
&source.name);
Ok(Redirect::to("/"))
}
#[derive(Serialize)]
struct HomeData<'a> {
me: &'a Person,
contacts: &'a [Contact],
current_path: &'a str,
stripe_public_key: &'a str,
}
#[get("/")]
fn home(me: Me, db: DB) -> Result<Template> {
let stripe_public_key: &str = &config::stripe_secret();
let contacts = filter!(db,
"SELECT * FROM contacts WHERE account = $1",
&me.0.id)
.map(Contact::from_row)
.collect::<Vec<_>>();
let context = HomeData {
me: &me.0,
contacts: &contacts,
current_path: "/",
stripe_public_key,
};
Ok(Template::render("home", &context))
}
#[derive(Serialize)]
pub struct NoContext {}
#[get("/", rank = 2)]
fn index() -> Template {
Template::render("index", &NoContext {})
}
#[get("/logout")]
fn logout(cookies: &Cookies) -> Redirect {
cookies.remove("session");
Redirect::to("/")
}
#[error(404)]
fn not_found() -> Template {
Template::render("error-pages/404-not-found", &NoContext {})
}
#[error(500)]
fn internal_server_error() -> Template {
Template::render("error-pages/500-internal-server-error", &NoContext {})
}
#[error(503)]
fn service_unavailable() -> Template {
Template::render("error-pages/503-service-unavailable", &NoContext {})
}
fn main() {
config::check();
rocket::ignite()
.mount("/", routes![
index,
login,
finish_login,
home,
logout,
new_contact,
delete_contact,
subscribe,
])
.catch(errors![
not_found,
internal_server_error,
service_unavailable,
])
.launch();
}<|fim▁end|>
|
stripeShippingAddressCountryCode: String,
}
#[post("/subscriptions", data="<form>")]
|
<|file_name|>includeme_test.py<|end_file_name|><|fim▁begin|>from bravado_core.spec import Spec
import mock
from pyramid.config import Configurator
from pyramid.registry import Registry
import pytest
from swagger_spec_validator.common import SwaggerValidationError
import pyramid_swagger
from pyramid_swagger.model import SwaggerSchema
<|fim▁hole|>@mock.patch('pyramid_swagger.register_api_doc_endpoints')
@mock.patch('pyramid_swagger.get_swagger_schema')
@mock.patch('pyramid_swagger.get_swagger_spec')
def test_disable_api_doc_views(_1, _2, mock_register):
settings = {
'pyramid_swagger.enable_api_doc_views': False,
'pyramid_swagger.enable_swagger_spec_validation': False,
}
mock_config = mock.Mock(
spec=Configurator,
registry=mock.Mock(spec=Registry, settings=settings))
pyramid_swagger.includeme(mock_config)
assert not mock_register.called
def test_bad_schema_validated_on_include():
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/bad_app/',
'pyramid_swagger.enable_swagger_spec_validation': True,
}
mock_config = mock.Mock(registry=mock.Mock(settings=settings))
with pytest.raises(SwaggerValidationError):
pyramid_swagger.includeme(mock_config)
# TODO: Figure out why this assertion fails on travis
# assert "'info' is a required property" in str(excinfo.value)
@mock.patch('pyramid_swagger.get_swagger_spec')
def test_bad_schema_not_validated_if_spec_validation_is_disabled(_):
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/bad_app/',
'pyramid_swagger.enable_swagger_spec_validation': False,
}
mock_config = mock.Mock(
spec=Configurator, registry=mock.Mock(settings=settings))
pyramid_swagger.includeme(mock_config)
@mock.patch('pyramid_swagger.register_api_doc_endpoints')
def test_swagger_12_only(mock_register):
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/',
'pyramid_swagger.swagger_versions': ['1.2']
}
mock_config = mock.Mock(registry=mock.Mock(settings=settings))
pyramid_swagger.includeme(mock_config)
assert isinstance(settings['pyramid_swagger.schema12'], SwaggerSchema)
assert mock_register.call_count == 1
@mock.patch('pyramid_swagger.register_api_doc_endpoints')
def test_swagger_20_only(mock_register):
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/',
'pyramid_swagger.swagger_versions': ['2.0']
}
mock_config = mock.Mock(registry=mock.Mock(settings=settings))
pyramid_swagger.includeme(mock_config)
assert isinstance(settings['pyramid_swagger.schema20'], Spec)
assert not settings['pyramid_swagger.schema12']
assert mock_register.call_count == 1
@mock.patch('pyramid_swagger.register_api_doc_endpoints')
def test_swagger_12_and_20(mock_register):
settings = {
'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/',
'pyramid_swagger.swagger_versions': ['1.2', '2.0']
}
mock_config = mock.Mock(registry=mock.Mock(settings=settings))
pyramid_swagger.includeme(mock_config)
assert isinstance(settings['pyramid_swagger.schema20'], Spec)
assert isinstance(settings['pyramid_swagger.schema12'], SwaggerSchema)
assert mock_register.call_count == 2<|fim▁end|>
| |
<|file_name|>qpathedit_template.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1">
<context>
<name>QPathEdit</name>
<message>
<location filename="QPathEdit/qpathedit.cpp" line="53"/><|fim▁hole|> <location filename="QPathEdit/qpathedit.cpp" line="407"/>
<source>…</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|>
|
<source>Open File-Dialog</source>
<translation type="unfinished"></translation>
</message>
<message>
|
<|file_name|>header_byte_count_test.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class HeaderByteCountTest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'header_bytes': 'int',
'reject_on_error': 'bool',
'checked': 'bool'
}
attribute_map = {
'header_bytes': 'header_bytes',
'reject_on_error': 'reject_on_error',
'checked': 'checked'
}
def __init__(self, header_bytes=None, reject_on_error=None, checked=None, local_vars_configuration=None): # noqa: E501
"""HeaderByteCountTest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._header_bytes = None
self._reject_on_error = None
self._checked = None
self.discriminator = None
if header_bytes is not None:
self.header_bytes = header_bytes
if reject_on_error is not None:
self.reject_on_error = reject_on_error
if checked is not None:
self.checked = checked
@property
def header_bytes(self):
"""Gets the header_bytes of this HeaderByteCountTest. # noqa: E501
:return: The header_bytes of this HeaderByteCountTest. # noqa: E501
:rtype: int
"""
return self._header_bytes
@header_bytes.setter
def header_bytes(self, header_bytes):
"""Sets the header_bytes of this HeaderByteCountTest.
:param header_bytes: The header_bytes of this HeaderByteCountTest. # noqa: E501
:type: int
"""
self._header_bytes = header_bytes
@property
def reject_on_error(self):
"""Gets the reject_on_error of this HeaderByteCountTest. # noqa: E501
:return: The reject_on_error of this HeaderByteCountTest. # noqa: E501
:rtype: bool
"""
return self._reject_on_error
@reject_on_error.setter
def reject_on_error(self, reject_on_error):
"""Sets the reject_on_error of this HeaderByteCountTest.
:param reject_on_error: The reject_on_error of this HeaderByteCountTest. # noqa: E501
:type: bool
"""
self._reject_on_error = reject_on_error
@property
def checked(self):
"""Gets the checked of this HeaderByteCountTest. # noqa: E501
:return: The checked of this HeaderByteCountTest. # noqa: E501
:rtype: bool
"""
return self._checked
@checked.setter<|fim▁hole|> def checked(self, checked):
"""Sets the checked of this HeaderByteCountTest.
:param checked: The checked of this HeaderByteCountTest. # noqa: E501
:type: bool
"""
self._checked = checked
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HeaderByteCountTest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, HeaderByteCountTest):
return True
return self.to_dict() != other.to_dict()<|fim▁end|>
| |
<|file_name|>MouseTestDlg.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "MouseTest.h"
#include "MouseTestDlg.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#endif
#define WM_CURSOR_HIDE (WM_APP + 1)
#define WM_CURSOR_SHOW (WM_APP + 2)
// Message map: routes Windows notifications to CMouseTestDlg handlers.
BEGIN_MESSAGE_MAP(CMouseTestDlg, CDialog)
    ON_WM_CLOSE()
    ON_WM_DESTROY()
    ON_WM_DRAWITEM()        // owner-drawn canvas box and rate box
    ON_WM_HSCROLL()
    ON_WM_SETTINGCHANGE()   // pick up mouse-setting changes made elsewhere
    ON_WM_TIMER()
    // Private app messages posted from the input-callback thread so the
    // actual ShowCursor calls run on the UI thread (see OnMouse).
    ON_MESSAGE_VOID(WM_CURSOR_HIDE, OnCursorHide)
    ON_MESSAGE_VOID(WM_CURSOR_SHOW, OnCursorShow)
    ON_BN_CLICKED(IDC_CLEAR_BTN, OnBnClickedClearBtn)
    ON_CBN_SELCHANGE(IDC_SPEED_LST, OnCbnSelchangeSpeedLst)
    // The acceleration value and both thresholds share one handler, which
    // re-reads all three combos (see OnCbnSelchangeAccelLst).
    ON_CBN_SELCHANGE(IDC_ACCEL_LST, OnCbnSelchangeAccelLst)
    ON_CBN_SELCHANGE(IDC_THRESHOLD1_LST, OnCbnSelchangeAccelLst)
    ON_CBN_SELCHANGE(IDC_THRESHOLD2_LST, OnCbnSelchangeAccelLst)
END_MESSAGE_MAP()
// C-style trampoline for the mouse-input engine: recover the dialog from the
// context pointer and forward the raw event to its OnMouse handler.
void __stdcall MouseCallback(int dx, int dy, int dz, int btns,
    double delay, void* param)
{
    CMouseTestDlg* const dlg = static_cast<CMouseTestDlg*>(param);
    if (dlg)
    {
        dlg->OnMouse(dx, dy, dz, btns, delay);
    }
}
// Construct against the dialog template (IDD) with no parent window; all
// real initialization happens in OnInitDialog.
CMouseTestDlg::CMouseTestDlg() : CDialog(IDD, NULL)
{
}
// Bind the dialog's controls to their member wrappers (invoked by the
// framework during dialog creation / UpdateData).
void CMouseTestDlg::DoDataExchange(CDataExchange* dx)
{
    CDialog::DoDataExchange(dx);
    DDX_Control(dx, IDC_CANVAS_BOX, m_canvasBox);          // drawing surface
    DDX_Control(dx, IDC_CLEAR_BTN, m_clearBtn);
    DDX_Control(dx, IDC_RATE_BOX, m_rateBox);              // event-rate graph
    DDX_Control(dx, IDC_SPEED_LST, m_speedLst);            // pointer speed
    DDX_Control(dx, IDC_ACCEL_LST, m_accelLst);            // acceleration level
    DDX_Control(dx, IDC_THRESHOLD1_LST, m_threshold1Lst);  // accel threshold 1
    DDX_Control(dx, IDC_THRESHOLD2_LST, m_threshold2Lst);  // accel threshold 2
    DDX_Control(dx, IDC_CURSOR_CHK, m_cursorChk);          // "keep cursor" toggle
}
// First-time dialog setup.  Order matters: the critical section must exist
// before m_input.open() starts delivering MouseCallback events, because
// OnMouse locks it from the callback thread.
BOOL CMouseTestDlg::OnInitDialog()
{
    CDialog::OnInitDialog();
    // Large and small application icons.
    SetIcon(AfxGetApp()->LoadIcon(IDR_MAINFRAME), TRUE);
    SetIcon(AfxGetApp()->LoadIcon(IDR_MAINFRAME), FALSE);
    InitializeCriticalSection(&m_critsect);
    // Timer 1 drives periodic repaints of the rate graph (see OnTimer).
    SetTimer(1, USER_TIMER_MINIMUM, NULL);
    m_buttons = false;           // no mouse button currently held
    m_cursorShow = true;         // system cursor starts visible
    m_cursorHide = false;        // no pending hide request
    m_cursorChk.SetCheck(TRUE);  // checked = keep cursor visible (see OnCursorHide)
    // Start the raw-input engine; NULL first argument presumably means
    // "any device" — TODO confirm against the input class's open().
    m_input.open(NULL, MouseCallback, this);
    // Seed the speed/acceleration combos from the current system settings.
    OnUpdateSpeed();
    return TRUE;
}
// Raw mouse-event sink, invoked on the input engine's callback thread via
// MouseCallback.  While any button is held it integrates the raw deltas into
// a synthetic cursor position, plots a coloured point on the canvas, and
// records the point (with its inter-event delay) for repaints and the rate
// graph.  Cursor hide/show is requested with PostMessage so the ShowCursor
// calls happen on the UI thread.
void CMouseTestDlg::OnMouse(int dx, int dy, int dz, int btns,
    double delay)
{
    HWND hwnd;
    HDC hdc;
    POINT point;
    RECT rect;
    COLORREF color;
    // Ignore events while another application is in the foreground.
    if (::GetForegroundWindow() != GetSafeHwnd())
    {
        return;
    }
    EnterCriticalSection(&m_critsect);
    // color doubles as the "a button is held" flag for the cursor logic
    // at OnExit: it stays 0 on every early-out path.
    color = 0;
    // check button state
    if (btns == 0)
    {
        goto OnExit;
    }
    // On a button-down transition, re-seed the synthetic position from the
    // real cursor and discard this event's deltas.
    if (!m_buttons)
    {
        GetCursorPos(&m_point);
        dx = 0;
        dy = 0;
    }
    // offset cursor position
    m_point.x += dx;
    m_point.y += dy;
    // Only plot when the synthetic point lies inside the canvas client area.
    hwnd = m_canvasBox.GetSafeHwnd();
    point = m_point;
    ::ScreenToClient(hwnd, &point);
    ::GetClientRect(hwnd, &rect);
    if (!PtInRect(&rect, point))
    {
        goto OnExit;
    }
    // Colour encodes the held buttons (COLORREF is 0x00BBGGRR):
    // left = red, right = blue, middle = green; combinations mix channels.
    if (btns & MK_LBUTTON) color |= 0x0000ff;
    if (btns & MK_RBUTTON) color |= 0xff0000;
    if (btns & MK_MBUTTON) color |= 0x00ff00;
    // Record the point so OnDrawItem can replay it on repaint.
    m_points.push_back(MousePoint(point, color, delay));
    // Draw immediately, lazily caching one solid brush per colour
    // (brushes are released in OnDestroy).
    hdc = ::GetDC(hwnd);
    if (m_brushes.find(color) == m_brushes.end())
    {
        m_brushes[color] = CreateSolidBrush(color);
    }
    // 3x3 square centred on the point, clipped to the client area.
    rect.left = __max(rect.left, point.x - 1);
    rect.top = __max(rect.top, point.y - 1);
    rect.right = __min(rect.right, point.x + 1);
    rect.bottom = __min(rect.bottom, point.y + 1);
    FillRect(hdc, &rect, m_brushes[color]);
    ::ReleaseDC(hwnd, hdc);
OnExit:
    // Remember whether any button is still held for the next event.
    m_buttons = btns != 0;
    // Request show: we had hidden the cursor and no button is drawing now.
    if (m_cursorHide && color == 0)
    {
        PostMessage(WM_CURSOR_SHOW, 0, 0);
        m_cursorHide = false;
    }
    // Request hide: drawing started and the cursor is not yet hidden.
    else if (!m_cursorHide && color != 0)
    {
        PostMessage(WM_CURSOR_HIDE, 0, 0);
        m_cursorHide = true;
    }
    LeaveCriticalSection(&m_critsect);
}
// Refresh the combo selections from the current system mouse settings.
// The >>1 mappings assume the combo items are the even values 2,4,...
// in order — TODO confirm against the dialog resource.
// NOTE(review): `text` is an unused local, and the SPI_GETMOUSE line carries
// "<|fim▁hole|>" token residue from a dataset extraction — strip the token
// and drop the unused local once the original source is confirmed.
void CMouseTestDlg::OnUpdateSpeed()
{
    CString text;
    UINT speed, accel[3];
    // speed
    SystemParametersInfo(SPI_GETMOUSESPEED, 0, &speed, 0);
    m_speedLst.SetCurSel(speed >> 1);
    // accel: accel[0]/accel[1] are the two thresholds, accel[2] the level
    SystemParametersInfo(SPI_GETMOUSE, 0, accel, 0);<|fim▁hole|> m_threshold1Lst.SetCurSel(accel[0] >> 1);
    m_threshold2Lst.SetCurSel(accel[1] >> 1);
    m_accelLst.SetCurSel(accel[2]);
}
// Deliberately empty: suppress the default IDOK handling so pressing Enter
// while testing does not dismiss the dialog.
void CMouseTestDlg::OnOK()
{
}
// Deliberately empty: suppress the default IDCANCEL handling so Esc does not
// dismiss the dialog; shutdown goes through OnClose/DestroyWindow instead.
void CMouseTestDlg::OnCancel()
{
}
// Close request: destroy the window explicitly (cleanup happens in
// OnDestroy) rather than relying on the modal EndDialog path.
void CMouseTestDlg::OnClose()
{
    DestroyWindow();
    CDialog::OnClose();
}
void CMouseTestDlg::OnDestroy()
{
m_input.close();
KillTimer(1);
DeleteCriticalSection(&m_critsect);
for (MouseBrushes::iterator it = m_brushes.begin();
it != m_brushes.end(); it++)
{
DeleteObject(it->second);
}
CDialog::OnDestroy();
}
// Owner-draw handler for the two custom boxes:
//  - canvas box: replay all recorded points (clipped 3x3 squares) on black;
//  - rate box:  plot the recent event-rate curve into an off-screen bitmap
//    (double buffering) and overlay the median rate in Hz.
void CMouseTestDlg::OnDrawItem(int id, LPDRAWITEMSTRUCT data)
{
    if (id == m_canvasBox.GetDlgCtrlID())
    {
        HDC hdc;
        RECT client;
        RECT rect;
        POINT point;
        COLORREF color;
        hdc = data->hDC;
        client = data->rcItem;
        // Erase to black, then replay every recorded point.
        FillRect(hdc, &client, (HBRUSH)GetStockObject(BLACK_BRUSH));
        EnterCriticalSection(&m_critsect);
        for (int i = 0; i < (int)m_points.size(); i++)
        {
            point = m_points[i].point;
            color = m_points[i].color;
            if (!PtInRect(&client, point))
            {
                continue;
            }
            // Same lazy brush cache as OnMouse (released in OnDestroy).
            if (m_brushes.find(color) == m_brushes.end())
            {
                m_brushes[color] = CreateSolidBrush(color);
            }
            // 3x3 square centred on the point, clipped to the item rect.
            rect.left = __max(client.left, point.x - 1);
            rect.top = __max(client.top, point.y - 1);
            rect.right = __min(client.right, point.x + 1);
            rect.bottom = __min(client.bottom, point.y + 1);
            FillRect(hdc, &rect, m_brushes[color]);
        }
        LeaveCriticalSection(&m_critsect);
    }
    else if (id == m_rateBox.GetDlgCtrlID())
    {
        std::vector<double> delays;
        CString text;
        HDC hdc;
        HBITMAP hbm;
        HGDIOBJ hbmp;
        HGDIOBJ hpen;
        RECT rect;
        int width;
        int height;
        int delayNum1;
        int delayNum2;
        double delaySum;
        double scaleY;
        rect = data->rcItem;
        width = rect.right - rect.left;
        height = rect.bottom - rect.top;
        // Collect the newest delays: delayNum1/delaySum scan back at least
        // ~3 seconds of events, delayNum2 caps the drawn points at one per
        // horizontal pixel.  delays[0] is the newest event.
        delayNum1 = 0;
        delayNum2 = 0;
        delaySum = 0;
        // Never block the UI paint on the callback thread's lock — just
        // skip this frame; the timer repaints again shortly (see OnTimer).
        if (!TryEnterCriticalSection(&m_critsect))
        {
            return;
        }
        for (int i = (int)m_points.size() - 1; i >= 0; i--)
        {
            bool done = true;
            double delay = m_points[i].delay;
            // only few seconds
            if (delaySum < 3)
            {
                delayNum1++;
                delaySum += delay;
                done = false;
            }
            // and 'width' points
            if (delayNum2 < width)
            {
                delayNum2++;
                done = false;
            }
            if (done)
            {
                break;
            }
            delays.push_back(delay);
        }
        LeaveCriticalSection(&m_critsect);
        // Draw the rate curve into a memory DC (flicker-free).
        // NOTE(review): drawing uses rcItem coordinates but BitBlt reads
        // from (0,0); assumes rcItem starts at (0,0) — TODO confirm.
        hdc = CreateCompatibleDC(data->hDC);
        hbm = CreateCompatibleBitmap(data->hDC, width, height);
        hbmp = SelectObject(hdc, hbm);
        hpen = SelectObject(hdc, (HPEN)GetStockObject(BLACK_PEN));
        FillRect(hdc, &rect, (HBRUSH)GetStockObject(WHITE_BRUSH));
        // Vertical full scale is 1000 Hz: pixel offset = (1/delay) * h/1000.
        // Assumes delay > 0 — presumably the input engine never reports 0;
        // TODO confirm, else this divides by zero.
        scaleY = (double)height / 1000;
        for (int i = 0; i < delayNum2; i++)
        {
            // Newest sample at the right edge, older samples to the left.
            int x = rect.right - i - 1;
            int y = rect.bottom - (int)(scaleY / delays[i]) - 1;
            x = __max(rect.left, x);
            y = __max(rect.top, y);
            if (i == 0)
            {
                MoveToEx(hdc, x, y, NULL);
            }
            else
            {
                LineTo(hdc, x, y);
            }
        }
        // Overlay the median event rate (1 / median delay) as text.
        if (!delays.empty())
        {
            std::sort(delays.begin(), delays.end());
            text.Format(TEXT("%.0lfHz"), 1 / delays[delays.size() >> 1]);
            SetBkColor(hdc, 0xffffff);
            SetTextColor(hdc, 0x000000);
            DrawText(hdc, text, -1, &rect, DT_CENTER | DT_VCENTER | DT_SINGLELINE);
        }
        // Blit the finished frame to the screen, then restore the DC's
        // original bitmap/pen before deleting ours.
        BitBlt(data->hDC, rect.left, rect.top, width, height,
            hdc, 0, 0, SRCCOPY);
        SelectObject(hdc, hbmp);
        SelectObject(hdc, hpen);
        DeleteObject(hbm);
        DeleteDC(hdc);
    }
    else
    {
        CDialog::OnDrawItem(id, data);
    }
}
// Keep the combos in sync when the mouse settings are changed from outside
// (e.g. the Control Panel broadcasts WM_SETTINGCHANGE).
void CMouseTestDlg::OnSettingChange(UINT flags, LPCTSTR section)
{
    CDialog::OnSettingChange(flags, section);
    switch (flags)
    {
    case SPI_SETMOUSESPEED:
    case SPI_SETMOUSE:
        OnUpdateSpeed();
        break;
    }
}
// Timer 1 (armed in OnInitDialog) periodically invalidates the rate box so
// the Hz graph stays live; other timer ids fall through to the base class.
void CMouseTestDlg::OnTimer(UINT_PTR id)
{
    const bool rateTick = (id == 1);
    if (rateTick)
    {
        m_rateBox.InvalidateRect(NULL, FALSE);
    }
    CDialog::OnTimer(id);
}
// UI-thread handler for WM_CURSOR_HIDE (posted from OnMouse).  Hide the
// cursor only if it is currently shown AND the "cursor" checkbox is
// unchecked (checked means "keep the cursor visible").  Short-circuit
// evaluation preserves the original nesting: GetCheck is only consulted
// when m_cursorShow is true.
void CMouseTestDlg::OnCursorHide()
{
    EnterCriticalSection(&m_critsect);
    if (m_cursorShow && m_cursorChk.GetCheck() == 0)
    {
        ShowCursor(FALSE);
        m_cursorShow = false;
    }
    LeaveCriticalSection(&m_critsect);
}
// UI-thread handler for WM_CURSOR_SHOW (posted from OnMouse): re-show the
// cursor if we previously hid it.  m_cursorShow is guarded by m_critsect
// because OnMouse reads the related state from the callback thread.
void CMouseTestDlg::OnCursorShow()
{
    EnterCriticalSection(&m_critsect);
    if (!m_cursorShow)
    {
        ShowCursor(TRUE);
        m_cursorShow = true;
    }
    LeaveCriticalSection(&m_critsect);
}
// "Clear" button: drop all recorded points (under the lock, since OnMouse
// appends from the callback thread) and force a canvas repaint.
void CMouseTestDlg::OnBnClickedClearBtn()
{
    EnterCriticalSection(&m_critsect);
    m_points.clear();
    m_canvasBox.InvalidateRect(NULL, FALSE);
    LeaveCriticalSection(&m_critsect);
}
void CMouseTestDlg::OnCbnSelchangeSpeedLst()
{
CString text;
UINT speed;
m_speedLst.GetWindowText(text);
speed = _ttoi(text);
SystemParametersInfo(SPI_SETMOUSESPEED, 0, (void*)speed, SPIF_SENDCHANGE);
OnUpdateSpeed();
}
void CMouseTestDlg::OnCbnSelchangeAccelLst()
{
CString text;
UINT accel[3];
if (SystemParametersInfo(SPI_GETMOUSE, 0, accel, 0))
{
m_threshold1Lst.GetWindowText(text);
accel[0] = _ttoi(text);
m_threshold2Lst.GetWindowText(text);
accel[1] = _ttoi(text);
m_accelLst.GetWindowText(text);
accel[2] = _ttoi(text);
SystemParametersInfo(SPI_SETMOUSE, 0, accel, SPIF_SENDCHANGE);
OnUpdateSpeed();
}
}<|fim▁end|>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.