prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
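Each row below is a raw fill-in-the-middle (FIM) record: the prompt wraps one source file in `<|fim▁begin|>` / `<|fim▁hole|>` / `<|fim▁end|>` markers, and the text after the final ` | ` separator is the ground-truth completion for the hole. The sketch below shows one way to split a row back into its parts; the ` | ` separator and the single-hole layout match the rows visible here, but they are assumptions about this export, not a documented schema.

```python
# Hypothetical parser for one row of this dump; assumes a single hole per
# record and that the completion itself contains no " | " sequence.
def parse_fim_record(row: str):
    prompt, _, completion = row.rpartition(" | ")
    completion = completion.removesuffix(" |")            # trailing table pipe
    fname = row.split("<|file_name|>")[1].split("<|end_file_name|>")[0]
    body = prompt.partition("<|fim▁begin|>")[2]
    prefix, _, tail = body.partition("<|fim▁hole|>")
    suffix = tail.removesuffix("<|fim▁end|>")
    return fname, prefix, suffix, completion

# Reassembling prefix + completion + suffix yields the original file.
```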
<|file_name|>backend.py<|end_file_name|><|fim▁begin|>###############################################################################
##
## Copyright (C) 2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from twisted.internet import reactor<|fim▁hole|>
class Component(ApplicationSession):
"""
Application component that provides procedures which
return complex results.
"""
def onConnect(self):
self.join("realm1")
def onJoin(self, details):
def add_complex(a, ai, b, bi):
return CallResult(c = a + b, ci = ai + bi)
self.register(add_complex, 'com.myapp.add_complex')
def split_name(fullname):
forename, surname = fullname.split()
return CallResult(forename, surname)
self.register(split_name, 'com.myapp.split_name')<|fim▁end|> | from twisted.internet.defer import inlineCallbacks
from autobahn.wamp.types import CallResult
from autobahn.twisted.wamp import ApplicationSession |
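For context, the completion above supplies the imports that the prefix and suffix of this `backend.py` record rely on. A caller for the two registered procedures might look like the following hedged sketch; it assumes Autobahn for Python's `ApplicationSession.call()` and that `CallResult` exposes positional results as `results` and keyword results as `kwresults`, with router and realm details left out as placeholders.

```python
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession

class Caller(ApplicationSession):
    @inlineCallbacks
    def onJoin(self, details):
        # Keyword-style complex result: CallResult(c=..., ci=...).
        res = yield self.call('com.myapp.add_complex', 2, 3, 4, 5)
        print("complex sum: {} + {}i".format(res.kwresults['c'],
                                             res.kwresults['ci']))
        # Positional multi-value result: CallResult(forename, surname).
        res = yield self.call('com.myapp.split_name', 'Homer Simpson')
        print("forename: {}, surname: {}".format(*res.results))
```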
<|file_name|>users.rs<|end_file_name|><|fim▁begin|>#![crate_name = "uu_users"]
/*
* This file is part of the uutils coreutils package.
*
* (c) KokaKiwi <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/* last synced with: whoami (GNU coreutils) 8.22 */
// Allow dead code here in order to keep all fields, constants here, for consistency.
#![allow(dead_code)]
extern crate getopts;
extern crate libc;
#[macro_use]
extern crate uucore;
use getopts::Options;
use std::ffi::{CStr, CString};
use std::mem;
use std::ptr;
use uucore::utmpx::*;
extern {
fn getutxent() -> *const c_utmp;
fn getutxid(ut: *const c_utmp) -> *const c_utmp;
fn getutxline(ut: *const c_utmp) -> *const c_utmp;
fn pututxline(ut: *const c_utmp) -> *const c_utmp;
fn setutxent();
fn endutxent();
#[cfg(any(target_os = "macos", target_os = "linux"))]
fn utmpxname(file: *const libc::c_char) -> libc::c_int;
}
#[cfg(target_os = "freebsd")]
unsafe extern fn utmpxname(_file: *const libc::c_char) -> libc::c_int {<|fim▁hole|>}
static NAME: &'static str = "users";
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub fn uumain(args: Vec<String>) -> i32 {
let mut opts = Options::new();
opts.optflag("h", "help", "display this help and exit");
opts.optflag("V", "version", "output version information and exit");
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => panic!("{}", f),
};
if matches.opt_present("help") {
println!("{} {}", NAME, VERSION);
println!("");
println!("Usage:");
println!(" {} [OPTION]... [FILE]", NAME);
println!("");
println!("{}", opts.usage("Output who is currently logged in according to FILE."));
return 0;
}
if matches.opt_present("version") {
println!("{} {}", NAME, VERSION);
return 0;
}
let filename = if !matches.free.is_empty() {
matches.free[0].as_ref()
} else {
DEFAULT_FILE
};
exec(filename);
0
}
fn exec(filename: &str) {
unsafe {
utmpxname(CString::new(filename).unwrap().as_ptr());
}
let mut users = vec!();
unsafe {
setutxent();
loop {
let line = getutxent();
if line == ptr::null() {
break;
}
if (*line).ut_type == USER_PROCESS {
let user = String::from_utf8_lossy(CStr::from_ptr(mem::transmute(&(*line).ut_user)).to_bytes()).to_string();
users.push(user);
}
}
endutxent();
}
if !users.is_empty() {
users.sort();
println!("{}", users.join(" "));
}
}<|fim▁end|> | 0 |
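The `users.rs` record above walks utmpx entries via FFI and prints the sorted login names of `USER_PROCESS` sessions. A rough Python analogue, assuming the third-party `psutil` package (whose `users()` call returns one entry per login session with a `name` field), might be:

```python
# Hypothetical Python counterpart of the `users` utility above.
import psutil

def users():
    # One entry per active login session, mirroring USER_PROCESS records.
    return sorted(session.name for session in psutil.users())

if __name__ == "__main__":
    print(" ".join(users()))
```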
<|file_name|>gt.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32<|fim▁hole|> type A = T;
type B = T;
#[test]
fn gt_test1() {
let array_a: [A; 24] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23
];
let array_b: [B; 24] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24
];
assert_eq!(array_a.ge(&array_b), false);
assert_eq!(array_a > array_b, false);
}
#[test]
fn gt_test2() {
let array_a: [A; 24] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23
];
let array_b: [B; 24] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23
];
assert_eq!(array_a.gt(&array_b), false);
assert_eq!(array_a > array_b, false);
}
#[test]
fn gt_test3() {
let array_a: [A; 24] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24
];
let array_b: [B; 24] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23
];
assert_eq!(array_a.gt(&array_b), true);
assert_eq!(array_a > array_b, true);
}
}<|fim▁end|> | // }
type T = i32; |
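The commented-out macro in this `gt.rs` record derives `PartialOrd`/`Ord` for fixed-size arrays by delegating to slice comparison, i.e. lexicographic order, which is exactly what `gt_test1` through `gt_test3` exercise. Python sequence comparison is lexicographic too, so the same three cases can be checked in a few lines:

```python
# Mirrors gt_test1..gt_test3: lexicographic comparison of 24-element arrays.
a = list(range(24))              # 0..23
b = [x + 1 for x in range(24)]   # 1..24

assert not a >= b    # gt_test1: a is elementwise smaller, so not >=
assert not a > a[:]  # gt_test2: equal sequences are not strictly greater
assert b > a         # gt_test3: b is elementwise larger
```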
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>def get_file_extension(filename):<|fim▁hole|><|fim▁end|> | return filename.split(".")[-1] |
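A caveat worth noting about this one-line completion: `split(".")[-1]` returns the whole name when there is no dot and only the last component of multi-part extensions, while the stdlib's `os.path.splitext` returns an empty extension for dotless names. Assuming the `get_file_extension` defined above is in scope:

```python
import os.path

assert get_file_extension("photo.jpeg") == "jpeg"
assert get_file_extension("archive.tar.gz") == "gz"   # last component only
assert get_file_extension("README") == "README"       # no dot: whole name
assert os.path.splitext("README")[1] == ""            # stdlib alternative
```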
<|file_name|>compile_speed.rs<|end_file_name|><|fim▁begin|>// 1.h, AMD 3.45 GHz: cost time 3600 ms; new version 2800 ms; 1.89: 2609 ms; fix2: 2516 ms; 2094 ms
// 36_2.rs: cost time 5375 ms (1.83: 4985 ms); new version 4438 ms; 1.89: 4156 ms; 3375 ms
void main()
{
start=xf.tick
for i=1;i<=10;i++
rf.cmd("rush ..\\src\\example\\test\\1.rs")
<|fim▁hole|>}<|fim▁end|> | printl
printl(xf.tick-start)
|
<|file_name|>DeviceShape.cpp<|end_file_name|><|fim▁begin|>// -*-C++ -*-
/** \file
********************************************************************
* Device shape base class for route window.
*
* \author Rüdiger Krauße,
* Tobias Schlemmer <[email protected]>
* \license GPL
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*
********************************************************************
*\addtogroup GUIroute
*\{
********************************************************************/
#include "src/kernel/Defs.h"
#include "src/kernel/error.h"
#include "src/wxGUI/MutFrame.h"
#include "src/wxGUI/Routing/GUIRoute.h"
#include "src/wxGUI/Routing/DeviceShape.h"
#include "src/wxGUI/Routing/RouteIcons.h"
#include "src/wxGUI/Routing/DebugRoute.h"
#include "src/wxGUI/Routing/BoxChannelShape.h"
#include "src/wxGUI/Routing/BoxDlg.h"
#include "src/wxGUI/Routing/InputDevDlg.h"
#include "src/wxGUI/Routing/OutputDevDlg.h"
#include "src/wxGUI/Routing/GUIRoute-inlines.h"
#include <algorithm>
#include "wx/defs.h"
#include "wx/bmpbuttn.h"
#include "wx/msgdlg.h"
//#include "MutApp.h"
//#include "MutIcon.h"
//#include "MutRouteWnd.h"
//#include "InputDevDlg.h"
//#include "Device.h"
// common part of the macros below
#define wxIMPLEMENT_CLASS_COMMON_TEMPLATE1(name, basename, baseclsinfo2, func, T1) \
template<> \
wxClassInfo name<T1>::ms_classInfo(wxT(#name "<" #T1 ">"), \
&basename::ms_classInfo, \
baseclsinfo2, \
(int) sizeof(name<T1>), \
(wxObjectConstructorFn) func); \
template<> \
wxClassInfo *name<T1>::GetClassInfo() const \
{ return &name<T1>::ms_classInfo; }
#define wxIMPLEMENT_CLASS_COMMON1_TEMPLATE1(name, basename, func, T1) \
wxIMPLEMENT_CLASS_COMMON_TEMPLATE1(name, basename, NULL, func, T1)
// Single inheritance with one base class
#define IMPLEMENT_ABSTRACT_CLASS_TEMPLATE1(name, basename, T1) \
wxIMPLEMENT_CLASS_COMMON1_TEMPLATE1(name, basename, NULL, T1)
// Single inheritance with one base class
#define IMPLEMENT_DYNAMIC_CLASS_TEMPLATE1(name, basename, T1) \
template<> \
wxObject* name<T1>::wxCreateObject() \
{ return new name<T1>; } \
wxIMPLEMENT_CLASS_COMMON1_TEMPLATE1(name, basename, \
name<T1>::wxCreateObject, T1)
using namespace mutaborGUI;
using namespace mutabor;
BEGIN_EVENT_TABLE_TEMPLATE1(MutDeviceShape, MutIconShape, T)
EVT_KEY_DOWN(MutDeviceShape::OnKeyDown)
EVT_LEFT_DCLICK(MutDeviceShape::LeftDblClickEvent)
EVT_MENU(CM_MOVE_UP, MutDeviceShape::CmMoveIcon)
EVT_MENU(CM_MOVE_DOWN, MutDeviceShape::CmMoveIcon)
EVT_MENU(CM_DEVICE_STATE_CHANGED, MutDeviceShape::CmDeviceNotification)
EVT_BUTTON(CM_PLAYDEVICE, MutDeviceShape::PlayButtonPressed)
EVT_BUTTON(CM_PAUSEDEVICE, MutDeviceShape::PlayButtonPressed)
EVT_BUTTON(CM_STOPDEVICE, MutDeviceShape::PlayButtonPressed)
END_EVENT_TABLE()
MUTABOR_NAMESPACE(mutaborGUI)
template<class T>
MutDeviceShape<T>::~MutDeviceShape() {
TRACEC;
if (device) {
try {
disconnect(device,this);
TRACEC;
} catch (const mutabor::error::unreachable_exception & e) {
mutabor::unhandled_exception_handler();
}
}
if (playbuttons) delete playbuttons;
TRACEC;
}
template<class T>
bool MutDeviceShape<T>::Create (wxWindow * parent,
wxWindowID id,
devicetype & d)
{
if (!d) return false;
DEBUGLOG (other, ("Checking icon"));
mutASSERT(MidiInputDevBitmap.IsOk());
mutASSERT(MidiOutputDevBitmap.IsOk());
TRACEC;
bool fine =
Create (parent, id, d->GetName());
TRACEC;
if (fine)
connect(d,this);
TRACEC;
return fine;
}
template<class T>
void MutDeviceShape<T>::Add(MutBoxChannelShape * route)
{
#ifdef DEBUG
MutBoxChannelShapeList::iterator pos =
std::find(routes.begin(),routes.end(),route);
mutASSERT(pos == routes.end());
#endif
routes.push_back(route);
// ClearPerimeterPoints();
Refresh();
Update();
}
template<class T>
bool MutDeviceShape<T>::Replace(MutBoxChannelShape * oldroute,
MutBoxChannelShape * newroute)
{
#ifdef DEBUG
MutBoxChannelShapeList::iterator pos =
std::find(routes.begin(),routes.end(),oldroute);
mutASSERT(pos != routes.end());
#endif
bool retval = Remove(oldroute);
Add(newroute);
Recompute();
return retval;
}
template<class T>
bool MutDeviceShape<T>::Remove(MutBoxChannelShape * route)
{
MutBoxChannelShapeList::iterator pos =
std::find(routes.begin(),routes.end(),route);
if (pos == routes.end()) {
UNREACHABLEC;
return false;
} else {
routes.erase(pos);
}
Recompute();
return true;
}
template<class T>
bool MutDeviceShape<T>::MoveRoutes (MutDeviceShape * newclass)
{
routes.swap(newclass->routes);
Recompute();
return true;
}
template<class T>
bool MutDeviceShape<T>::Recompute()
{
// ClearPerimeterPoints();
SetIcon(GetMutIcon());
// SetLabel (filename.GetFullName());
return GetIcon().IsOk();
}
template<class T>
void MutDeviceShape<T>::ReadPanel(FilterPanel * panel,
MutBoxChannelShape * channel)
{
mutASSERT(panel);
mutASSERT(channel);
if (!panel || !channel) return;
bool active = panel->IsShown();
thistype * newShape = panel->GetCurrentSelection();
Route & route = channel->GetRoute();
if (!active) {
TRACEC;
disconnect(channel,this);
TRACEC;
return;
} else if (newShape != this) {
TRACEC;
reconnect(channel,this,newShape);
TRACEC;
}
if (newShape) {
wxWindow * subpanel = panel->GetCurrentDevicePage();
if (!panel) {
UNREACHABLEC;
return;
}
TRACEC;
newShape->ReadFilterPanel(subpanel,route);
}
TRACEC;
}
template<class T>
void MutDeviceShape<T>::OnKeyDown (wxKeyEvent & event) {
if (event.HasModifiers()) {
event.Skip();
return;
}
/* Other inspirations:
case WXK_DELETE:
// cursor keys
*/
switch (event.GetKeyCode()) {
case WXK_NUMPAD_ENTER:
case WXK_RETURN:
case WXK_SPACE:
case WXK_NUMPAD_SPACE:
case WXK_NUMPAD_ADD:
case WXK_ADD:
case WXK_WINDOWS_MENU:
case WXK_MENU:
{
wxCommandEvent command(wxEVT_COMMAND_MENU_SELECTED,
CM_LEFT_DOUBLE_CLICK);
wxPostEvent(this,command);
return;
}
default:
event.Skip();
}
}
/**
* Move the corresponding device in the device list and
* update the GUI according to the new order.
*
* \param event wxCommandEvent containing the request
*/
template <class T>
void MutDeviceShape<T>::CmMoveIcon (wxCommandEvent & event) {
switch (event.GetId()) {
case CM_MOVE_UP:
MoveDevice(-1);
break;
case CM_MOVE_DOWN:
MoveDevice(+1);
break;
}
}
template <class T>
void MutDeviceShape<T>::DoDeviceNotification(wxCommandEvent & mutUNUSED(event))
{
if (!device || !playbuttons) return;
MutaborModeType mode = device->GetMode();
bool open = device->IsOpen();
bool hidePlay = !open;
bool hidePause = hidePlay,
hideStop = hidePlay;
if (open) {
switch (mode) {
case DevicePlay:
hidePlay = true;
break;
case DeviceStop:
hideStop = true;
break;
case DevicePause:
hidePause = true;
break;
case DeviceKilled:
case DeviceUnregistered:
case DeviceInitializing:
case DeviceCompileError:
case DeviceTimingError:
return;
}
}
bool dolayout = false;
wxSizerItemList & playlist = playbuttons->GetChildren();
for (wxSizerItemList::iterator i = playlist.begin();
i != playlist.end(); i++) {
wxWindow * button = (*i)->GetWindow();
bool hide = !open;
switch (button -> GetId()) {
case CM_PLAYDEVICE:
hide = hidePlay;
break;
case CM_STOPDEVICE:
hide = hideStop;
break;
case CM_PAUSEDEVICE:
hide = hidePause;
break;
}
if (hide) {
if (playbuttons->IsShown(button)) {
playbuttons->Hide(button);
dolayout = true;
}
} else {
if (!playbuttons->IsShown(button)) {
playbuttons->Show(button);<|fim▁hole|> }
if (dolayout) {
playbuttons->Layout();
wxSize size = playbuttons->GetMinSize();
playbuttons->SetDimension(0,0,size.GetWidth(),size.GetHeight());
Update();
}
}
template <class T>
void MutDeviceShape<T>::createPlayButtons()
{
mutASSERT(!playbuttons);
playbuttons = new wxBoxSizer(wxVERTICAL);
if (!playbuttons) return;
wxBitmapButton * button =
new wxBitmapButton(this,CM_PLAYDEVICE,DevicePlayBitmap);
playbuttons->Add(button);
button =
new wxBitmapButton(this,CM_PAUSEDEVICE,DevicePauseBitmap);
playbuttons->Add(button);
button =
new wxBitmapButton(this,CM_STOPDEVICE,DeviceStopBitmap);
playbuttons->Add(button);
playbuttons->Layout();
wxSize size = playbuttons->GetMinSize();
playbuttons->SetDimension(0,0,size.GetWidth(),size.GetHeight());
}
template <class T>
void MutDeviceShape<T>::createPauseButton()
{
mutASSERT(!playbuttons);
playbuttons = new wxBoxSizer(wxVERTICAL);
if (!playbuttons) return;
wxBitmapButton * button =
new wxBitmapButton(this,CM_PAUSEDEVICE,DevicePauseBitmap);
playbuttons->Add(button);
playbuttons->Layout();
wxSize size = playbuttons->GetMinSize();
playbuttons->SetDimension(0,0,size.GetWidth(),size.GetHeight());
}
template <class T>
void MutDeviceShape<T>::createRecordButtons()
{
mutASSERT(!playbuttons);
playbuttons = new wxBoxSizer(wxVERTICAL);
if (!playbuttons) return;
wxBitmapButton * button =
new wxBitmapButton(this,CM_PLAYDEVICE,DeviceRecordBitmap);
playbuttons->Add(button);
button =
new wxBitmapButton(this,CM_PAUSEDEVICE,DevicePauseBitmap);
playbuttons->Add(button);
button =
new wxBitmapButton(this,CM_STOPDEVICE,DeviceStopBitmap);
playbuttons->Add(button);
playbuttons->Layout();
wxSize size = playbuttons->GetMinSize();
playbuttons->SetDimension(0,0,size.GetWidth(),size.GetHeight());
}
template <class T>
void MutDeviceShape<T>::PlayButtonPressed(wxCommandEvent & event)
{
if (!device) return;
if (!device->IsOpen()) return;
switch (event.GetId()) {
case CM_PLAYDEVICE:
device -> Play();
break;
case CM_PAUSEDEVICE:
device -> Pause();
break;
case CM_STOPDEVICE:
device -> Stop();
break;
}
}
template <class T>
void MutDeviceShape<T>::DoLeftDblClick() {
TRACEC;
DeviceDialog * dlg = ShowDeviceDialog();
int Res = dlg->ShowModal();
TRACEC;
bool destroySelf = false;
wxWindow * parent = m_parent; // to remain available after deletion.
TRACEC;
if (Res == wxID_OK) {
DevType type = dlg->GetType();
if (CanHandleType (type)) {
TRACEC;
readDialog (dlg);
} else if (type != DTNotSet) { // assure type is set.
TRACEC;
devicetype dev =
DeviceFactory::Create<devicetype>(type);
if (dev) {
TRACEC;
thistype * newdev =
GUIDeviceFactory::CreateShape (dev,
GetParent());
if (! newdev) {
dlg->Destroy();
UNREACHABLEC;
return;
}
mutASSERT(newdev->device);
TRACEC;
newdev -> readDialog (dlg);
if (LogicOn && !(newdev->device->IsOpen()))
newdev->device->Open();
TRACEC;
destroySelf = replaceSelfBy (newdev);
}
}
} else if (Res == ::wxID_REMOVE) {
TRACEC;
device -> Destroy();
}
// Now, we may be deleted.
dlg->Destroy();
DebugCheckRoutes();
TRACEC;
if (Res != ::wxID_REMOVE && !destroySelf) {
Layout();
InvalidateBestSize();
Fit();
Refresh();
}
if (parent) {
parent->InvalidateBestSize();
parent->Layout();
parent->FitInside();
parent->Refresh();
parent->Update();
} else if (Res != ::wxID_REMOVE && !destroySelf) Update();
/* we don't need to destroy this control.
This should have been done during device destruction
*/
TRACE;
}
template<class T>
typename MutDeviceShape<T>::DeviceDialog * MutDeviceShape<T>::ShowDeviceDialog() {
ABSTRACT_FUNCTIONC;
abort();
}
template <class T>
bool MutDeviceShape<T>::DetachDevice ()
{
wxWindow * parent = m_parent;
wxSizer * sizer = GetContainingSizer();
Hide();
if (sizer) {
sizer -> Detach(this);
}
if (parent) {
parent->Layout();
parent->FitInside();
parent->SetVirtualSize(wxDefaultSize);
parent->Refresh();
parent->Update();
}
TRACEC;
device->Destroy();
TRACEC;
return true;
}
template<class T>
bool MutDeviceShape<T>::replaceSelfBy (thistype * newshape)
{
/** \todo transfer this function to GUIRoute */
mutASSERT (newshape);
mutASSERT (newshape->device);
TRACEC;
if (device) // might be zero as in MutNewInputDeviceShape
device->MoveRoutes(newshape->GetDevice());
TRACEC;
newshape->MoveBeforeInTabOrder (this);
wxSizer * sizer = GetContainingSizer();
sizer -> Replace (this, newshape, false);
newshape->SetFocus();
Hide();
wxWindow * parent = m_parent;
parent->RemoveChild(this);
parent->Layout();
parent->FitInside();
parent->SetVirtualSize(wxDefaultSize);
TRACEC;
device->Destroy();
// at this moment this points to invalid memory
TRACET(MutInputDeviceShape);
return true;
}
// instantiate MutInputDeviceShape
template<>
InputDevDlg * MutInputDeviceShape::ShowDeviceDialog() {
InputDevDlg * in = new InputDevDlg (m_parent);
#ifdef RTMIDI
if (rtmidiin) {
try {
rtmidi::PortList ports = rtmidiin->getPortList();
if (ports.empty()) {
in->AppendPortChoiceNoDevice();
}
else for (rtmidi::PortList::iterator i = ports.begin();
i != ports.end();
++i) {
in->AppendPortChoice(*i);
}
} catch (const rtmidi::Error &error) {
error.printMessage();
in->AppendPortChoiceNoDevice();
}
}
#else
STUBC;
#endif
// in->SetType(DTUnknown);
in->SelectMidiDevice(0);
in->SetMidiFile(wxEmptyString);
in->SetGUIDOFile(wxEmptyString);
InitializeDialog(in);
in->Fit();
return in;
}
template<>
OutputDevDlg * MutOutputDeviceShape::ShowDeviceDialog()
{
OutputDevDlg * out = new OutputDevDlg (m_parent);
#ifdef RTMIDI
if (rtmidiout) {
try {
rtmidi::PortList ports = rtmidiout->getPortList();
if (ports.empty()) {
out->AppendPortChoiceNoDevice();
}
else for (rtmidi::PortList::iterator i = ports.begin();
i != ports.end();
++i) {
out->AppendPortChoice(*i);
}
} catch (const rtmidi::Error &error) {
wxMessageBox(error.getMessage());
error.printMessage();
out->AppendPortChoiceNoDevice();
}
}
#else
/* nMidi = midiInGetNumDevs();
if ( nMidi )
{
for (int i = 0; i < nMidi; i++)
{
MIDIINCAPS miin;
midiInGetDevCaps(i, &miin, sizeof(MIDIINCAPS));
DataR0.Device.AddString(miin.szPname);
}
}
else
DataR0.Device.AddString("no device");*/
#endif
// in->SetType(DTUnknown);
out->SelectMidiDevice(0);
out->SetMidiFile(wxEmptyString);
out->SetGUIDOFile(wxEmptyString);
out->SetMidiBendingRange(2);
out->SetMidiFileBendingRange(2);
InitializeDialog(out);
out->Fit();
return out;
}
IMPLEMENT_ABSTRACT_CLASS_TEMPLATE1(MutDeviceShape, MutIconShape, inputdevicetypes)
IMPLEMENT_ABSTRACT_CLASS_TEMPLATE1(MutDeviceShape, MutIconShape, outputdevicetypes)
template class MutDeviceShape<inputdevicetypes>;
template class MutDeviceShape<outputdevicetypes>;
MUTABOR_NAMESPACE_END(mutaborGUI)
template class std::list<mutaborGUI::MutInputDeviceShape *>;
template class std::list<mutaborGUI::MutOutputDeviceShape *>;
/*
* \}
*/<|fim▁end|> | dolayout = true;
}
} |
<|file_name|>mrn_udf_last_insert_grn_id.cpp<|end_file_name|><|fim▁begin|>/* -*- c-basic-offset: 2; indent-tabs-mode: nil -*- */<|fim▁hole|> Copyright(C) 2010 Tetsuro IKEDA
Copyright(C) 2010-2013 Kentoku SHIBA
Copyright(C) 2011-2017 Kouhei Sutou <[email protected]>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA
*/
#include <mrn_mysql.h>
#include <mrn_windows.hpp>
#include <mrn_table.hpp>
#include <mrn_macro.hpp>
#include <mrn_current_thread.hpp>
MRN_BEGIN_DECLS
MRN_API my_bool last_insert_grn_id_init(UDF_INIT *init, UDF_ARGS *args, char *message)
{
if (args->arg_count != 0) {
strcpy(message, "last_insert_grn_id must not have arguments");
return 1;
}
init->maybe_null = 0;
return 0;
}
MRN_API longlong last_insert_grn_id(UDF_INIT *init, UDF_ARGS *args, char *is_null, char *error)
{
THD *thd = current_thd;
st_mrn_slot_data *slot_data = mrn_get_slot_data(thd, false);
if (slot_data == NULL) {
return 0;
}
longlong last_insert_record_id = slot_data->last_insert_record_id;
return last_insert_record_id;
}
MRN_API void last_insert_grn_id_deinit(UDF_INIT *init)
{
}
MRN_END_DECLS<|fim▁end|> | /* |
<|file_name|>ExprPartyName.java<|end_file_name|><|fim▁begin|>package com.alessiodp.parties.bukkit.addons.external.skript.expressions;
import ch.njol.skript.classes.Changer;
import ch.njol.skript.doc.Description;
import ch.njol.skript.doc.Examples;
import ch.njol.skript.doc.Name;
import ch.njol.skript.doc.Since;
import ch.njol.skript.expressions.base.SimplePropertyExpression;
import ch.njol.util.coll.CollectionUtils;
import com.alessiodp.parties.api.interfaces.Party;
import org.bukkit.event.Event;
@Name("Party Name")
@Description("Get the name of the given party.")
@Examples({"send \"%name of party with name \"test\"%\"",
"send \"%name of event-party%\""})
@Since("3.0.0")
public class ExprPartyName extends SimplePropertyExpression<Party, String> {
static {
register(ExprPartyName.class, String.class, "name", "party");
}
@Override
public Class<? extends String> getReturnType() {
return String.class;
}
@Override
protected String getPropertyName() {
return "name";
}
@Override
public String convert(Party party) {
return party.getName();
}
@Override
public void change(Event e, Object[] delta, Changer.ChangeMode mode){
if (delta != null) {
Party party = getExpr().getSingle(e);
String newName = (String) delta[0];
switch (mode) {
case SET:
party.rename(newName);
break;<|fim▁hole|> break;
default:
break;
}
}
}
@Override
public Class<?>[] acceptChange(final Changer.ChangeMode mode) {
return (mode == Changer.ChangeMode.SET || mode == Changer.ChangeMode.DELETE) ? CollectionUtils.array(String.class) : null;
}
}<|fim▁end|> | case DELETE:
party.rename(null); |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
'''Test for DDNS forwarding'''
from dnstest.test import Test
t = Test()
master = t.server("knot")
slave = t.server("knot")
zone = t.zone("example.com.")
t.link(zone, master, slave, ddns=True)<|fim▁hole|>master.zones_wait(zone)
seri = slave.zones_wait(zone)
# OK
update = slave.update(zone)
update.add("forwarded.example.com.", 1, "TXT", "forwarded")
update.send("NOERROR")
resp = master.dig("forwarded.example.com.", "TXT")
resp.check("forwarded")
slave.zones_wait(zone, seri)
t.xfr_diff(master, slave, zone)
# NAME out of zone
update = slave.update(zone)
update.add("forwarded.", 1, "TXT", "forwarded")
update.send("NOTZONE")
resp = master.dig("forwarded.", "TXT")
resp.check(rcode="REFUSED")
t.sleep(3)
t.xfr_diff(master, slave, zone)
t.end()<|fim▁end|> |
t.start()
|
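The `test.py` record above drives Knot DNS's `dnstest` harness to check DDNS forwarding. Outside that harness, the same kind of RFC 2136 dynamic update can be sent with the third-party `dnspython` package; the server address here is a placeholder and the call names are dnspython's, not the harness's.

```python
# Hedged sketch: send the in-zone TXT update from the test with dnspython.
import dns.query
import dns.rcode
import dns.update

update = dns.update.Update("example.com.")
update.add("forwarded", 1, "TXT", "forwarded")        # forwarded.example.com.
response = dns.query.tcp(update, "127.0.0.1", timeout=5)
print(dns.rcode.to_text(response.rcode()))            # expect NOERROR
```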
<|file_name|>it.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
if (typeof define === "function" && define.amd) {
define('element/locale/it', ['module', 'exports'], factory);
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.ELEMENT.lang = global.ELEMENT.lang || {};
global.ELEMENT.lang.it = mod.exports;
}
})(this, function (module, exports) {
'use strict';
exports.__esModule = true;
exports.default = {
el: {
colorpicker: {
confirm: 'OK',
clear: 'Pulisci'
},
datepicker: {
now: 'Ora',
today: 'Oggi',
cancel: 'Cancella',
clear: 'Pulisci',
confirm: 'OK',
selectDate: 'Seleziona data',
selectTime: 'Seleziona ora',
startDate: 'Data inizio',
startTime: 'Ora inizio',
endDate: 'Data fine',<|fim▁hole|> nextMonth: 'Mese successivo',
year: '',
month1: 'Gennaio',
month2: 'Febbraio',
month3: 'Marzo',
month4: 'Aprile',
month5: 'Maggio',
month6: 'Giugno',
month7: 'Luglio',
month8: 'Agosto',
month9: 'Settembre',
month10: 'Ottobre',
month11: 'Novembre',
month12: 'Dicembre',
// week: 'settimana',
weeks: {
sun: 'Dom',
mon: 'Lun',
tue: 'Mar',
wed: 'Mer',
thu: 'Gio',
fri: 'Ven',
sat: 'Sab'
},
months: {
jan: 'Gen',
feb: 'Feb',
mar: 'Mar',
apr: 'Apr',
may: 'Mag',
jun: 'Giu',
jul: 'Lug',
aug: 'Ago',
sep: 'Set',
oct: 'Ott',
nov: 'Nov',
dec: 'Dic'
}
},
select: {
loading: 'Caricamento',
noMatch: 'Nessuna corrispondenza',
noData: 'Nessun dato',
placeholder: 'Seleziona'
},
cascader: {
noMatch: 'Nessuna corrispondenza',
loading: 'Caricamento',
placeholder: 'Seleziona'
},
pagination: {
goto: 'Vai a',
pagesize: '/page',
total: 'Totale {total}',
pageClassifier: ''
},
messagebox: {
confirm: 'OK',
cancel: 'Cancella',
error: 'Input non valido'
},
upload: {
deleteTip: 'Premi cancella per rimuovere',
delete: 'Cancella',
preview: 'Anteprima',
continue: 'Continua'
},
table: {
emptyText: 'Nessun dato',
confirmFilter: 'Conferma',
resetFilter: 'Reset',
clearFilter: 'Tutti',
sumText: 'Somma'
},
tree: {
emptyText: 'Nessun dato'
},
transfer: {
noMatch: 'Nessuna corrispondenza',
noData: 'Nessun dato',
titles: ['Lista 1', 'Lista 2'],
filterPlaceholder: 'Inserisci filtro',
noCheckedFormat: '{total} elementi',
hasCheckedFormat: '{checked}/{total} selezionati'
},
image: {
error: 'FAILED' // to be translated
}
}
};
module.exports = exports['default'];
});<|fim▁end|> | endTime: 'Ora fine',
prevYear: 'Anno precedente',
nextYear: 'Anno successivo',
prevMonth: 'Mese precedente', |
<|file_name|>start_page_service.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/app_list/start_page_service.h"
#include <string>
#include "base/command_line.h"
#include "base/memory/singleton.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/extension_system_factory.h"
#include "chrome/browser/extensions/install_tracker_factory.h"
#include "chrome/browser/media/media_stream_infobar_delegate.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/app_list/recommended_apps.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/extensions/extension.h"
#include "chrome/common/url_constants.h"
#include "components/browser_context_keyed_service/browser_context_dependency_manager.h"
#include "components/browser_context_keyed_service/browser_context_keyed_service_factory.h"
#include "content/public/browser/notification_observer.h"
#include "content/public/browser/notification_registrar.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/web_contents.h"
#include "content/public/browser/web_contents_delegate.h"
namespace app_list {
class StartPageService::Factory : public BrowserContextKeyedServiceFactory {
public:
static StartPageService* GetForProfile(Profile* profile) {
if (!CommandLine::ForCurrentProcess()->HasSwitch(
switches::kShowAppListStartPage)) {
return NULL;
}
return static_cast<StartPageService*>(
GetInstance()->GetServiceForBrowserContext(profile, true));
}
static Factory* GetInstance() {
return Singleton<Factory>::get();
}
private:
friend struct DefaultSingletonTraits<Factory>;
Factory()
: BrowserContextKeyedServiceFactory(
"AppListStartPageService",
BrowserContextDependencyManager::GetInstance()) {
DependsOn(extensions::ExtensionSystemFactory::GetInstance());
DependsOn(extensions::InstallTrackerFactory::GetInstance());
}
virtual ~Factory() {}
// BrowserContextKeyedServiceFactory overrides:
virtual BrowserContextKeyedService* BuildServiceInstanceFor(
content::BrowserContext* context) const OVERRIDE {
Profile* profile = static_cast<Profile*>(context);
return new StartPageService(profile);
}
DISALLOW_COPY_AND_ASSIGN(Factory);
};
class StartPageService::ProfileDestroyObserver
: public content::NotificationObserver {
public:
explicit ProfileDestroyObserver(StartPageService* service)
: service_(service) {
registrar_.Add(this,
chrome::NOTIFICATION_PROFILE_DESTROYED,
content::NotificationService::AllSources());
}
virtual ~ProfileDestroyObserver() {}
private:
// content::NotificationObserver
virtual void Observe(int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) OVERRIDE {
DCHECK_EQ(chrome::NOTIFICATION_PROFILE_DESTROYED, type);
service_->Shutdown();
}
StartPageService* service_; // Owner of this class.
content::NotificationRegistrar registrar_;
DISALLOW_COPY_AND_ASSIGN(ProfileDestroyObserver);
};
class StartPageService::StartPageWebContentsDelegate
: public content::WebContentsDelegate {
public:
StartPageWebContentsDelegate() {}
virtual ~StartPageWebContentsDelegate() {}
<|fim▁hole|> const content::MediaStreamRequest& request,
const content::MediaResponseCallback& callback) OVERRIDE {
if (MediaStreamInfoBarDelegate::Create(web_contents, request, callback))
NOTREACHED() << "Media stream not allowed for WebUI";
}
private:
DISALLOW_COPY_AND_ASSIGN(StartPageWebContentsDelegate);
};
// static
StartPageService* StartPageService::Get(Profile* profile) {
return Factory::GetForProfile(profile);
}
StartPageService::StartPageService(Profile* profile)
: profile_(profile),
profile_destroy_observer_(new ProfileDestroyObserver(this)),
recommended_apps_(new RecommendedApps(profile)) {
contents_.reset(content::WebContents::Create(
content::WebContents::CreateParams(profile_)));
contents_delegate_.reset(new StartPageWebContentsDelegate());
contents_->SetDelegate(contents_delegate_.get());
GURL url(chrome::kChromeUIAppListStartPageURL);
CommandLine* command_line = CommandLine::ForCurrentProcess();
if (command_line->HasSwitch(switches::kAppListStartPageURL)) {
url = GURL(
command_line->GetSwitchValueASCII(switches::kAppListStartPageURL));
}
contents_->GetController().LoadURL(
url,
content::Referrer(),
content::PAGE_TRANSITION_AUTO_TOPLEVEL,
std::string());
}
StartPageService::~StartPageService() {}
void StartPageService::Shutdown() {
contents_.reset();
}
} // namespace app_list<|fim▁end|> | virtual void RequestMediaAccessPermission(
content::WebContents* web_contents, |
<|file_name|>jpackagesRest__test.py<|end_file_name|><|fim▁begin|>from JumpScale import j
import unittest
import JumpScale.portal
descr = """
test jpackages over rest to portal (appserver)
"""
organization = "jumpscale"
author = "[email protected]"
license = "bsd"
version = "1.0"
category = "appserver.jpackages.rest,portal"
enable=True
priority=5
class TEST(unittest.TestCase):
def setUp(self):
self.client= j.core.portal.getClient("127.0.0.1", 81, "1234") #@need to read from config file for the secret
self.actor = self.client.getActor("system", "packagemanager")
def test_getJpackages(self):
l1=self.actor.getJPackages(j.application.whoAmI.nid)
print l1
l2=self.actor.getJPackages(j.application.whoAmI.nid,"jumpscale")
print l2
def test_getJpackageInfo(self):
jp=self.actor.getJPackageInfo(j.application.whoAmI.nid,"jumpscale","osis")
print jp
def test_getJpackageFilesInfo(self):
info=self.actor.getJPackageFilesInfo(j.application.whoAmI.nid,"jumpscale","osis")
# print info<|fim▁hole|>
def test_action(self):
info=self.actor.action(j.application.whoAmI.nid,domain="jumpscale",pname="osis",action="start")
print info
#@todo finish tests and make better<|fim▁end|> | |
<|file_name|>SVGFECompositeElement-dom-k1-attr.js<|end_file_name|><|fim▁begin|>// [Name] SVGFECompositeElement-dom-k1-attr.js
// [Expected rendering result] Four circle with different opacity merged with feComposite filter - and a series of PASS messages
description("Tests dynamic updates of the 'k1' attribute of the SVGFECompositeElement object")
createSVGTestCase();
var defsElement = createSVGElement("defs");
rootSVGElement.appendChild(defsElement);
var off1 = createSVGElement("feOffset");
off1.setAttribute("dx", "35");
off1.setAttribute("dy", "25");
off1.setAttribute("result", "off1");
var flood1 = createSVGElement("feFlood");
flood1.setAttribute("flood-color", "#408067");
flood1.setAttribute("flood-opacity", ".8");
flood1.setAttribute("result", "F1");
var overComposite1 = createSVGElement("feComposite");
overComposite1.setAttribute("in", "F1");
overComposite1.setAttribute("in2", "off1");
overComposite1.setAttribute("operator", "arithmetic");
overComposite1.setAttribute("k1", "1.9");
overComposite1.setAttribute("k2", ".1");
overComposite1.setAttribute("k3", ".5");
overComposite1.setAttribute("k4", ".3");
overComposite1.setAttribute("result", "C1");
var off2 = createSVGElement("feOffset");
off2.setAttribute("in", "SourceGraphic");
off2.setAttribute("dx", "60");
off2.setAttribute("dy", "50");
off2.setAttribute("result", "off2");
var flood2 = createSVGElement("feFlood");
flood2.setAttribute("flood-color", "#408067");
flood2.setAttribute("flood-opacity", ".6");
flood2.setAttribute("result", "F2");
var overComposite2 = createSVGElement("feComposite");
overComposite2.setAttribute("in", "F2");
overComposite2.setAttribute("in2", "off2");
overComposite2.setAttribute("operator", "in");
overComposite2.setAttribute("result", "C2");
var off3 = createSVGElement("feOffset");
off3.setAttribute("in", "SourceGraphic");
off3.setAttribute("dx", "85");
off3.setAttribute("dy", "75");
off3.setAttribute("result", "off3");
var flood3 = createSVGElement("feFlood");
flood3.setAttribute("flood-color", "#408067");
flood3.setAttribute("flood-opacity", ".4");
flood3.setAttribute("result", "F3");<|fim▁hole|>var overComposite3 = createSVGElement("feComposite");
overComposite3.setAttribute("in2", "off3");
overComposite3.setAttribute("operator", "in");
overComposite3.setAttribute("result", "C3");
var merge = createSVGElement("feMerge");
var mergeNode1 = createSVGElement("feMergeNode");
mergeNode1.setAttribute("in", "C1");
var mergeNode2 = createSVGElement("feMergeNode");
mergeNode2.setAttribute("in", "C2");
var mergeNode3 = createSVGElement("feMergeNode");
mergeNode3.setAttribute("in", "C3");
var mergeNode4 = createSVGElement("feMergeNode");
mergeNode4.setAttribute("in", "SourceGraphic");
merge.appendChild(mergeNode3);
merge.appendChild(mergeNode2);
merge.appendChild(mergeNode1);
merge.appendChild(mergeNode4);
var overFilter = createSVGElement("filter");
overFilter.setAttribute("id", "overFilter");
overFilter.setAttribute("filterUnits", "objectBoundingBox");
overFilter.setAttribute("x", "0");
overFilter.setAttribute("y", "0");
overFilter.setAttribute("width", "3.5");
overFilter.setAttribute("height", "4");
overFilter.appendChild(off1);
overFilter.appendChild(flood1);
overFilter.appendChild(overComposite1);
overFilter.appendChild(off2);
overFilter.appendChild(flood2);
overFilter.appendChild(overComposite2);
overFilter.appendChild(off3);
overFilter.appendChild(flood3);
overFilter.appendChild(overComposite3);
overFilter.appendChild(merge);
defsElement.appendChild(overFilter);
rootSVGElement.setAttribute("height", "200");
var rect1 = createSVGElement("circle");
rect1.setAttribute("cx", "100");
rect1.setAttribute("cy", "50");
rect1.setAttribute("r", "50");
rect1.setAttribute("fill", "#408067");
rect1.setAttribute("filter", "url(#overFilter)");
rootSVGElement.appendChild(rect1);
shouldBeEqualToString("overComposite1.getAttribute('k1')", "1.9");
function repaintTest() {
overComposite1.setAttribute("k1", ".5");
shouldBeEqualToString("overComposite1.getAttribute('k1')", ".5");
completeTest();
}
var successfullyParsed = true;<|fim▁end|> | |
<|file_name|>async_common.py<|end_file_name|><|fim▁begin|># Copyright (c) Meta Platforms, Inc. and affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pyre-unsafe
"""
Common base for asyncio and Trollius (the Python 2 asyncio backport).
Ideally this would be all that's necessary but we can't use the async/await
syntax on Python 2 so we had to abstract coroutines away.
Look for them in TAsyncioServer and TTrolliusServer respectively.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import struct
import warnings
from collections import defaultdict
from io import BytesIO
import six
import thrift
from thrift.protocol.THeaderProtocol import THeaderProtocolFactory
from thrift.server.TServer import TConnectionContext
from thrift.Thrift import (
TApplicationException,
TMessageType,
)
from thrift.transport.THeaderTransport import (
THeaderTransport,
HEADER_FLAG,
MAX_FRAME_SIZE,
CLIENT_TYPE,
)
from thrift.transport.TTransport import (
TTransportBase,
TTransportException,
)
if six.PY3:
import asyncio
else:
# pyre-fixme[21]: Could not find module `trollius`.
import trollius as asyncio
# We support the deprecated FRAMED transport for old fb303
# clients that were otherwise failing miserably.
THEADER_CLIENT_TYPES = {
CLIENT_TYPE.HEADER,
CLIENT_TYPE.FRAMED_DEPRECATED,
}
_default_thpfactory = THeaderProtocolFactory(client_types=THEADER_CLIENT_TYPES)
THeaderProtocol = _default_thpfactory.getProtocol
logger = logging.getLogger(__name__)
if six.PY2:
class PermissionError(IOError):
pass
class ResourceWarning(RuntimeWarning):
pass
class TReadOnlyBuffer(TTransportBase):
"""Leaner version of TMemoryBuffer that is resettable."""
def __init__(self, value=b""):
self._open = True
self._value = value
self.reset()
def isOpen(self):
return self._open
def close(self):
self._io.close()
self._open = False
def read(self, sz):
return self._io.read(sz)
def write(self, buf):
raise PermissionError("This is a read-only buffer")
def reset(self):
self._io = BytesIO(self._value)
class TWriteOnlyBuffer(TTransportBase):
"""Leaner version of TMemoryBuffer that is resettable."""
def __init__(self):
self._open = True
self.reset()
def isOpen(self):
return self._open
def close(self):
self._io.close()
self._open = False
def read(self, sz):
raise EOFError("This is a write-only buffer")
def write(self, buf):
self._io.write(buf)
def getvalue(self):
return self._io.getvalue()
def reset(self):
self._io = BytesIO()
class TReadWriteBuffer(TTransportBase):
"""TMemoryBuffer equivalent with separate buffers to read and write."""
def __init__(self, value=b""):
self._read_io = TReadOnlyBuffer(value=value)
self._write_io = TWriteOnlyBuffer()
self.read = self._read_io.read
self.write = self._write_io.write
self.getvalue = self._write_io.getvalue
self.reset()
def isOpen(self):
return self._read_io._open and self._write_io._open
def close(self):
self._read_io.close()
self._write_io.close()
def reset(self):
self._read_io.reset()
self._write_io.reset()
# Note: read()/write()/getvalue() methods are bound in __init__().
class WrappedTransport(TWriteOnlyBuffer):
"""Wraps an asyncio.Transport in a Thrift Transport interface."""
MAX_QUEUE_SIZE = 1024
def __init__(self, trans, proto, loop):
super(WrappedTransport, self).__init__()
self._trans = trans
self._proto = proto
self._loop = loop
self._queue = asyncio.Queue(
maxsize=self.MAX_QUEUE_SIZE,
loop=self._loop,
)
self._consumer = self._loop.create_task(self._send())
self._producers = []
async def _send(self):
raise NotImplementedError
def send_message(self, msg):
self._producers.append(
self._loop.create_task(self._queue.put(msg)),
)
def flush(self):
msg = self.getvalue()
tmi = TReadOnlyBuffer(msg)
iprot = THeaderProtocol(tmi)
fname, mtype, seqid = iprot.readMessageBegin()
fname = fname.decode()
self._proto.schedule_timeout(fname, seqid)
self.send_message(msg)
self.reset()
def _clean_producers(self):
self._producers = [
p for p in self._producers if not p.done() and not p.cancelled()
]
def close(self):
try:
self._consumer.cancel()
for producer in self._producers:
if not producer.done() and not producer.cancelled():
producer.cancel()
super(WrappedTransport, self).close()
self._consumer = None
del self._producers
finally:
self._trans.close()
def __del__(self):
if self._consumer and (
not self._consumer.done() or not self._consumer.cancelled()
):
logger.debug(
"WrappedTransport did not finish properly"
" as the consumer asyncio.Task is still pending."
" Make sure to call .close() on this object."
)
if self.isOpen():
warnings.warn(
"WrappedTransport is being garbage collected"
" while still open."
" Make sure to call .close() on this object.",
ResourceWarning,
)
# pyre-fixme[11]: Annotation `Protocol` is not defined as a type.
class FramedProtocol(asyncio.Protocol):
"""Unpacks Thrift frames and reads them asynchronously."""
def __init__(self, loop=None):
self.loop = loop or asyncio.get_event_loop()
self.recvd = b""
async def message_received(self, frame):
raise NotImplementedError
def data_received(self, data):
"""Implements asyncio.Protocol.data_received."""
self.recvd = self.recvd + data
while len(self.recvd) >= 4:
(length,) = struct.unpack("!I", self.recvd[:4])
if length > MAX_FRAME_SIZE:
logger.error(
"Frame size %d too large for THeaderProtocol",
length,
)
self.transport.close()
return
elif length == 0:
logger.error("Empty frame")
self.transport.close()
return
if len(self.recvd) < length + 4:
return
frame = self.recvd[0 : 4 + length]
self.recvd = self.recvd[4 + length :]
self.loop.create_task(self.message_received(frame))
def eof_received(self):
"""Implements asyncio.Protocol.eof_received."""
return self.connection_lost(exc=None)
# Don't forget to implement connection_made/connection_lost in your
# subclass. There's also pause_writing/resume_writing but it seems we're<|fim▁hole|>
class ThriftHeaderClientProtocolBase(FramedProtocol):
"""asyncio THeader protocol wrapper for client use.
This is abstract, missing implementation of an async TTransport
wrapper and the `message_received` coroutine function.
"""
DEFAULT_TIMEOUT = 60.0
THEADER_PROTOCOL_FACTORY = THeaderProtocolFactory
_exception_serializer = None
def __init__(
self,
client_class,
loop=None,
timeouts=None,
client_type=None,
):
super(ThriftHeaderClientProtocolBase, self).__init__(loop=loop)
self.client_class = client_class
if timeouts is None:
timeouts = {}
default_timeout = timeouts.get("") or self.DEFAULT_TIMEOUT
self.timeouts = defaultdict(lambda: default_timeout)
self.timeouts.update(timeouts)
self.client_type = client_type
self.client = None
self.pending_tasks = {}
self.transport = None # TTransport wrapping an asyncio.Transport
async def message_received(self, frame):
self._handle_message(frame, clear_timeout=True)
async def timeout_task(self, fname, delay):
# timeout_task must to be implemented in a subclass
raise NotImplementedError
def _handle_timeout(self, fname, seqid):
exc = TApplicationException(
TApplicationException.TIMEOUT, "Call to {} timed out".format(fname)
)
serialized_exc = self.serialize_texception(fname, seqid, exc)
self._handle_message(serialized_exc, clear_timeout=False)
def wrapAsyncioTransport(self, asyncio_transport):
raise NotImplementedError
def connection_made(self, transport):
"""Implements asyncio.Protocol.connection_made."""
assert self.transport is None, "Thrift transport already instantiated here."
assert self.client is None, "Client already instantiated here."
self.transport = self.wrapAsyncioTransport(transport)
thrift_protocol = self.THEADER_PROTOCOL_FACTORY(
client_type=self.client_type,
).getProtocol(self.transport)
thrift_protocol.trans.set_header_flag(HEADER_FLAG.SUPPORT_OUT_OF_ORDER)
self.client = self.client_class(thrift_protocol, self.loop)
def connection_lost(self, exc):
"""Implements asyncio.Protocol.connection_lost."""
te = TTransportException(
type=TTransportException.END_OF_FILE, message="Connection closed"
)
self.fail_all_futures(te)
def fail_all_futures(self, exc):
for fut in self.client._futures.values():
if not fut.done():
fut.set_exception(exc)
def _handle_message(self, frame, clear_timeout):
try:
tmi = TReadOnlyBuffer(frame)
iprot = self.THEADER_PROTOCOL_FACTORY(
client_type=self.client_type,
).getProtocol(tmi)
(fname, mtype, seqid) = iprot.readMessageBegin()
except TTransportException as ex:
self.fail_all_futures(ex)
self.transport.close()
return
except Exception as ex:
te = TTransportException(
type=TTransportException.END_OF_FILE, message=str(ex)
)
self.fail_all_futures(te)
self.transport.close()
return
if clear_timeout:
try:
timeout_task = self.pending_tasks.pop(seqid)
except KeyError:
# Task doesn't have a timeout or has already been cancelled
# and pruned from `pending_tasks`.
pass
else:
timeout_task.cancel()
self._handle_message_received(iprot, fname, mtype, seqid)
def _handle_message_received(self, iprot, fname, mtype, seqid):
method = getattr(self.client, "recv_" + fname.decode(), None)
if method is None:
logger.error("Method %r is not supported", fname)
self.close()
return
try:
method(iprot, mtype, seqid)
except (
asyncio.CancelledError,
asyncio.InvalidStateError,
) as e:
logger.warning("Method %r cancelled: %s", fname, str(e))
def update_pending_tasks(self, seqid, task):
no_longer_pending = [
_seqid
for _seqid, _task in self.pending_tasks.items()
if _task.done() or _task.cancelled()
]
for _seqid in no_longer_pending:
del self.pending_tasks[_seqid]
assert seqid not in self.pending_tasks, "seqid already pending for timeout"
self.pending_tasks[seqid] = task
def schedule_timeout(self, fname, seqid):
timeout = self.timeouts[fname]
if not timeout:
return
timeout_task = asyncio.Task(
self.timeout_task(fname, seqid, delay=timeout),
loop=self.loop,
)
self.update_pending_tasks(seqid, timeout_task)
def close(self):
for task in self.pending_tasks.values():
if not task.done() and not task.cancelled():
task.cancel()
if not self.transport:
return
try:
# Closing the wrapped sender transport will cascade closing
# of the underlying tranports, too.
self.transport.close()
except Exception:
pass
@classmethod
def serialize_texception(cls, fname, seqid, exception):
"""This saves us a bit of processing time for timeout handling by
reusing the Thrift structs involved in exception serialization.
NOTE: this is not thread-safe nor it is meant to be.
"""
# the serializer is a singleton
if cls._exception_serializer is None:
buffer = TWriteOnlyBuffer()
transport = THeaderTransport(buffer)
cls._exception_serializer = THeaderProtocol(transport)
else:
transport = cls._exception_serializer.trans
buffer = transport.getTransport()
buffer.reset()
serializer = cls._exception_serializer
serializer.writeMessageBegin(fname, TMessageType.EXCEPTION, seqid)
exception.write(serializer)
serializer.writeMessageEnd()
serializer.trans.flush()
return buffer.getvalue()
class AsyncioRpcConnectionContext(TConnectionContext):
def __init__(self, client_socket):
self._client_socket = client_socket
def getPeerName(self):
return self._client_socket.getpeername()
def getSockName(self):
return self._client_socket.getsockname()<|fim▁end|> | # fine without it. |
<|file_name|>destroy_test.js<|end_file_name|><|fim▁begin|>import isEnabled from 'ember-metal/features';
import run from 'ember-metal/run_loop';
import { observer } from 'ember-metal/mixin';
import { set } from 'ember-metal/property_set';
import { bind } from 'ember-metal/binding';
import {
beginPropertyChanges,
endPropertyChanges
} from 'ember-metal/property_events';
import { testBoth } from 'ember-metal/tests/props_helper';
import EmberObject from 'ember-runtime/system/object';
import { peekMeta } from 'ember-metal/meta';
QUnit.module('ember-runtime/system/object/destroy_test');
testBoth('should schedule objects to be destroyed at the end of the run loop', function(get, set) {
var obj = EmberObject.create();
var meta;
run(function() {
obj.destroy();
meta = peekMeta(obj);
ok(meta, 'meta is not destroyed immediately');
ok(get(obj, 'isDestroying'), 'object is marked as destroying immediately');
ok(!get(obj, 'isDestroyed'), 'object is not destroyed immediately');
});
meta = peekMeta(obj);
ok(!meta, 'meta is destroyed after run loop finishes');
ok(get(obj, 'isDestroyed'), 'object is destroyed after run loop finishes');
});
if (isEnabled('mandatory-setter')) {
// MANDATORY_SETTER moves value to meta.values
// a destroyed object removes meta but leaves the accessor
// that looks it up
QUnit.test('should raise an exception when modifying watched properties on a destroyed object', function() {
var obj = EmberObject.extend({
fooDidChange: observer('foo', function() { })
}).create({
foo: 'bar'
});
run(function() {
obj.destroy();
});
throws(function() {
set(obj, 'foo', 'baz');
}, Error, 'raises an exception');
});
}
QUnit.test('observers should not fire after an object has been destroyed', function() {
var count = 0;
var obj = EmberObject.extend({
fooDidChange: observer('foo', function() {
count++;
})
}).create();
obj.set('foo', 'bar');
equal(count, 1, 'observer was fired once');
run(function() {
beginPropertyChanges();
obj.set('foo', 'quux');
obj.destroy();
endPropertyChanges();
});
equal(count, 1, 'observer was not called after object was destroyed');
});
QUnit.test('destroyed objects should not see each others changes during teardown but a long lived object should', function () {
var shouldChange = 0;
var shouldNotChange = 0;
var objs = {};
var A = EmberObject.extend({
objs: objs,
isAlive: true,
willDestroy() {
this.set('isAlive', false);
},
bDidChange: observer('objs.b.isAlive', function () {
shouldNotChange++;
}),
cDidChange: observer('objs.c.isAlive', function () {
shouldNotChange++;
})
});
var B = EmberObject.extend({
objs: objs,
isAlive: true,
willDestroy() {
this.set('isAlive', false);
},
aDidChange: observer('objs.a.isAlive', function () {
shouldNotChange++;
}),
cDidChange: observer('objs.c.isAlive', function () {
shouldNotChange++;
})
});
var C = EmberObject.extend({
objs: objs,
isAlive: true,
willDestroy() {
this.set('isAlive', false);
},
aDidChange: observer('objs.a.isAlive', function () {
shouldNotChange++;
}),
bDidChange: observer('objs.b.isAlive', function () {
shouldNotChange++;
})
});
var LongLivedObject = EmberObject.extend({
objs: objs,
isAliveDidChange: observer('objs.a.isAlive', function () {
shouldChange++;
})
});
objs.a = new A();
objs.b = new B();
objs.c = new C();
new LongLivedObject();
run(function () {
var keys = Object.keys(objs);
for (var i = 0; i < keys.length; i++) {
objs[keys[i]].destroy();
}
});
equal(shouldNotChange, 0, 'destroyed graph objs should not see change in willDestroy');
equal(shouldChange, 1, 'long lived should see change in willDestroy');
});
QUnit.test('bindings should be synced when are updated in the willDestroy hook', function() {
var bar = EmberObject.create({
value: false,
willDestroy() {
this.set('value', true);
}
});
var foo = EmberObject.create({
value: null,
bar: bar
});
<|fim▁hole|> bind(foo, 'value', 'bar.value');
});
ok(bar.get('value') === false, 'the initial value has been bound');
run(function() {
bar.destroy();
});
ok(foo.get('value'), 'foo is synced when the binding is updated in the willDestroy hook');
});<|fim▁end|> | run(function() { |
<|file_name|>Autonomous.java<|end_file_name|><|fim▁begin|>/*
To Do:
Fix Reverse Driving
Make only one side fire (right)
*/
/* Copyright (c) 2014, 2015 Qualcomm Technologies Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Qualcomm Technologies Inc nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.*;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.qualcomm.robotcore.hardware.*;
//red turns left
//blue turns right
/*
To Do:
Double gears on shooter
Rotate Block and Top Part of Beacon Pusher 90 degrees. The servo end position is currently level
with the end of the robot instead of sideways
*/
import java.text.SimpleDateFormat;
import java.util.Date;
import static android.os.SystemClock.sleep;
/**
 * Registers the OpMode and initializes variables.
 */
@com.qualcomm.robotcore.eventloop.opmode.Autonomous(name = "Autonomous α", group = "FTC772")
public class Autonomous extends LinearOpMode {
private ElapsedTime runtime = new ElapsedTime();
private DcMotor frontLeft, frontRight, intake, dispenserLeft, dispenserRight, liftLeft, liftRight, midtake;
private Servo dispenser, beaconAngleLeft, beaconAngleRight, forkliftLeft, forkliftRight;
private boolean drivingForward = true;
//private boolean init = false;
//private final double DISPENSER_POWER = 1;
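    // NOTE: these two beacon-servo end positions are never assigned anywhere, so
    // they default to 0.0 and Math.abs(.5 - value) below always evaluates to 0.5;
    // they should be set to the calibrated "in" positions before the beacon
    // pushers are relied on.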
private double BEACON_LEFT_IN;
private double BEACON_RIGHT_IN;
private final int INITIAL_FORWARD = 1000;
private final int RAMP_UP = 1000;
private final int TURN_ONE = 300;
private final int FORWARD_TWO = 500;
private final int TURN_TWO = 300;
private final int FORWARD_THREE = 300;
private final int COLOR_CORRECTION = 50;
private final int FORWARD_FOUR = 400;
private final int TURN_THREE = 500;
private final int FORWARD_FIVE = 500;
private final boolean isRed = true;
private boolean didColorCorrection = false;
private boolean wasChangingAngle = false;
private ColorSensor colorSensor;
private TouchSensor leftTouchSensor, rightTouchSensor;
// @Override
// public void init() {
// /*
// Initialize DcMotors
// */
// frontLeft = hardwareMap.dcMotor.get("frontLeft");
// frontRight = hardwareMap.dcMotor.get("frontRight");
//
// //intake = hardwareMap.dcMotor.get("intake");
// dispenserLeft = hardwareMap.dcMotor.get("dispenserLeft");
// dispenserRight = hardwareMap.dcMotor.get("dispenserRight");
//
// /*
// Initialize Servos
// */
// dispenserAngle = hardwareMap.servo.get("dispenserAngle");
// beaconAngle = hardwareMap.servo.get("beaconAngle");
//
//
// /*
// Initialize Sensors
// */
// colorSensor = hardwareMap.colorSensor.get("colorSensor");
// leftTouchSensor = hardwareMap.touchSensor.get("leftTouchSensor");
// rightTouchSensor = hardwareMap.touchSensor.get("rightTouchSensor");
//
// //Display completion message
// telemetry.addData("Status", "Initialized");
// }
/*
* Code to run when the op mode is first enabled goes here
* @see com.qualcomm.robotcore.eventloop.opmode.OpMode#start()
@Override
public void init_loop() {
}*/
/*
* This method will be called ONCE when start is pressed
* @see com.qualcomm.robotcore.eventloop.opmode.OpMode#loop()
*/
    /*
    public void start() {
        // Initialize all motors/servos to position
        //runtime.reset();
        //dispenserAngle.setPosition(DEFAULT_ANGLE);
    }
    */
/*
* This method will be called repeatedly in a loop
* @see com.qualcomm.robotcore.eventloop.opmode.OpMode#loop()
*/
@Override
public void runOpMode() throws InterruptedException {
frontLeft = hardwareMap.dcMotor.get("frontLeft");
frontRight = hardwareMap.dcMotor.get("frontRight");
intake = hardwareMap.dcMotor.get("intake");
midtake = hardwareMap.dcMotor.get("midtake");
dispenserLeft = hardwareMap.dcMotor.get("dispenserLeft");
dispenserRight = hardwareMap.dcMotor.get("dispenserRight");
dispenserLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.FLOAT);
dispenserRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.FLOAT);
liftLeft = hardwareMap.dcMotor.get("liftLeft");
liftRight = hardwareMap.dcMotor.get("liftRight");
liftLeft.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftLeft.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
liftRight.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftRight.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
/*
Initialize Servos
*/
dispenser = hardwareMap.servo.get("dispenser");
beaconAngleLeft = hardwareMap.servo.get("beaconAngleLeft");
beaconAngleRight = hardwareMap.servo.get("beaconAngleRight");
forkliftLeft = hardwareMap.servo.get("forkliftLeft");
forkliftRight = hardwareMap.servo.get("forkliftRight");
/*
Initialize Sensors
*/
        // The color sensor is read below during the beacon checks, so it must be
        // initialized here; leaving this commented out would cause a
        // NullPointerException at the first colorSensor.red() call.
        colorSensor = hardwareMap.colorSensor.get("colorSensor");
//leftTouchSensor = hardwareMap.touchSensor.get("leftTouchSensor");
//rightTouchSensor = hardwareMap.touchSensor.get("rightTouchSensor");
//Display completion message
telemetry.addData("Status", "Initialized");
/*
Steps to Autonomous:
Fire starting balls
Drive to beacon 1
Press beacon 1
Drive to beacon 2
Press beacon 2
Drive to center and park while knocking ball off
*/
frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(INITIAL_FORWARD);
frontLeft.setPower(0);
frontRight.setPower(0);
dispenserLeft.setPower(1);
dispenserRight.setPower(1);
sleep(RAMP_UP);
intake.setPower(1);
midtake.setPower(1);
dispenser.setPosition(0);
sleep(500);
dispenser.setPosition(.45);
sleep(150);
dispenser.setPosition(0);
sleep(500);
dispenser.setPosition(.45);
intake.setPower(0);
midtake.setPower(0);
dispenserRight.setPower(0);
dispenserLeft.setPower(0);
if (isRed) {
frontLeft.setPower(1);
frontRight.setPower(1);
sleep(TURN_ONE);
frontRight.setPower(-1);
}
else {
frontLeft.setPower(-1);
frontRight.setPower(-1);
sleep(TURN_ONE);
frontLeft.setPower(1);
}
sleep(FORWARD_TWO);
if (!isRed) {
frontLeft.setPower(-1);
sleep(TURN_TWO);
frontLeft.setPower(1);
}
else {
frontRight.setPower(1);
sleep(TURN_TWO);
frontRight.setPower(-1);
}
sleep(FORWARD_THREE);
frontLeft.setPower(0);
frontRight.setPower(0);
if (!isRed) {
if (colorSensor.red()<colorSensor.blue()) {
beaconAngleRight.setPosition(Math.abs(.5-BEACON_RIGHT_IN));
}
else {
frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(COLOR_CORRECTION);
didColorCorrection = true;
frontLeft.setPower(0);
frontRight.setPower(0);
beaconAngleRight.setPosition(Math.abs(.5-BEACON_RIGHT_IN));
}
}
else {
if (colorSensor.red()>colorSensor.blue()) {
beaconAngleLeft.setPosition(Math.abs(.5-BEACON_LEFT_IN));
}
else {
frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(COLOR_CORRECTION);
didColorCorrection = true;
frontLeft.setPower(0);
frontRight.setPower(0);
beaconAngleLeft.setPosition(Math.abs(.5-BEACON_LEFT_IN));
}
}
frontLeft.setPower(1);
frontRight.setPower(-1);
if (didColorCorrection) {
sleep(FORWARD_FOUR-COLOR_CORRECTION);
}
else {
sleep(FORWARD_FOUR);
}
frontLeft.setPower(0);
frontRight.setPower(0);
if (!isRed) {
if (colorSensor.red()<colorSensor.blue()) {
beaconAngleRight.setPosition(Math.abs(.5-BEACON_RIGHT_IN));
}
else {
frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(COLOR_CORRECTION);
frontLeft.setPower(0);
frontRight.setPower(0);
beaconAngleRight.setPosition(Math.abs(.5-BEACON_RIGHT_IN));
}
}
else {
if (colorSensor.red()>colorSensor.blue()) {
beaconAngleLeft.setPosition(Math.abs(.5-BEACON_LEFT_IN));
}
else {
<|fim▁hole|> frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(COLOR_CORRECTION);
frontLeft.setPower(0);
frontRight.setPower(0);
beaconAngleLeft.setPosition(Math.abs(.5-BEACON_LEFT_IN));
}
}
frontLeft.setPower(1);
frontRight.setPower(1);
sleep(TURN_THREE);
frontRight.setPower(-1);
        sleep(FORWARD_FIVE);
        // Stop the drive motors here; otherwise the robot would keep driving at
        // full power through the long sleep() before the fallback routine below.
        frontLeft.setPower(0);
        frontRight.setPower(0);
telemetry.addData("Status", "Run Time: " + runtime.toString());
        /*
        This section is a short fallback version of the autonomous routine, in case
        the part above doesn't work. It drives straight forward and knocks the cap
        ball off in the center.
        */
sleep(10000);
frontLeft.setPower(1);
frontRight.setPower(-1);
sleep(4000);
frontRight.setPower(0);
frontLeft.setPower(0);
sleep(10000);
}
}<|fim▁end|> | |
<|file_name|>relations.js<|end_file_name|><|fim▁begin|>var pub = {},
Q,
Knex;
module.exports = function ($inject) {
$inject = $inject || {};
Q = $inject.Q;
Knex = $inject.Knex;
return pub;
};
pub.get = function(tableName) {
var q = Q.defer();
pub.getMetadata(tableName)
.then(function(relations) {
q.resolve(relations[0]);
});
return q.promise;
};
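// Example usage (a sketch; assumes the module was wired up with the app's own
// Q and Knex instances, and that an 'orders' table with foreign keys exists):
//
//   var relations = require('./relations')({ Q: Q, Knex: Knex });
//   relations.get('orders').then(function (fk) {
//     // `fk` is the first element of the raw foreign-key metadata result,
//     // with the column aliases selected in getMetadata below.
//   });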
pub.getMetadata = function(tableName) {
return Knex.knex.raw('SELECT ' +
'KCU1.CONSTRAINT_NAME AS FK_CONSTRAINT_NAME, ' +
'KCU1.COLUMN_NAME AS FK_COLUMN_NAME, ' +
'KCU2.CONSTRAINT_NAME AS REFERENCED_CONSTRAINT_NAME, ' +
'KCU2.TABLE_NAME AS REFERENCED_TABLE_NAME, ' +
'KCU2.COLUMN_NAME AS REFERENCED_COLUMN_NAME ' +
'FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC ' +
'INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU1 ' +
'ON KCU1.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG ' +
'AND KCU1.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA ' +<|fim▁hole|> 'AND KCU2.CONSTRAINT_SCHEMA = RC.UNIQUE_CONSTRAINT_SCHEMA ' +
'AND KCU2.CONSTRAINT_NAME = RC.UNIQUE_CONSTRAINT_NAME ' +
'AND KCU2.ORDINAL_POSITION = KCU1.ORDINAL_POSITION ' +
'AND KCU2.TABLE_NAME = RC.REFERENCED_TABLE_NAME ' +
'WHERE kcu1.table_name = ?', tableName);
};<|fim▁end|> | 'AND KCU1.CONSTRAINT_NAME = RC.CONSTRAINT_NAME ' +
'AND KCU1.TABLE_NAME = RC.TABLE_NAME ' +
'INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU2 ' +
'ON KCU2.CONSTRAINT_CATALOG = RC.UNIQUE_CONSTRAINT_CATALOG ' + |
<|file_name|>block.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Layout for CSS block-level elements.
//!
//! As a terminology note, the term *absolute positioning* here refers to elements with position
//! `absolute` or `fixed`. The term *positioned element* refers to elements with position
//! `relative`, `absolute`, and `fixed`. The term *containing block* (occasionally abbreviated as
//! *CB*) is the containing block for the current flow, which differs from the static containing
//! block if the flow is absolutely-positioned.
//!
//! "CSS 2.1" or "CSS 2.2" refers to the editor's draft of the W3C "Cascading Style Sheets Level 2
//! Revision 2 (CSS 2.2) Specification" available here:
//!
//! http://dev.w3.org/csswg/css2/
//!
//! "INTRINSIC" refers to L. David Baron's "More Precise Definitions of Inline Layout and Table
//! Layout" available here:
//!
//! http://dbaron.org/css/intrinsic/
//!
//! "CSS-SIZING" refers to the W3C "CSS Intrinsic & Extrinsic Sizing Module Level 3" document
//! available here:
//!
//! http://dev.w3.org/csswg/css-sizing/
#![deny(unsafe_code)]
use context::LayoutContext;
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
use display_list_builder::{FragmentDisplayListBuilding};
use euclid::{Point2D, Rect, Size2D};
use floats::{ClearType, FloatKind, Floats, PlacementInfo};
use flow::{BLOCK_POSITION_IS_STATIC};
use flow::{CLEARS_LEFT, CLEARS_RIGHT};
use flow::{HAS_LEFT_FLOATED_DESCENDANTS, HAS_RIGHT_FLOATED_DESCENDANTS};
use flow::{IMPACTED_BY_LEFT_FLOATS, IMPACTED_BY_RIGHT_FLOATS, INLINE_POSITION_IS_STATIC};
use flow::{IS_ABSOLUTELY_POSITIONED};
use flow::{ImmutableFlowUtils, LateAbsolutePositionInfo, MutableFlowUtils, OpaqueFlow};
use flow::{LAYERS_NEEDED_FOR_DESCENDANTS, NEEDS_LAYER};
use flow::{PostorderFlowTraversal, PreorderFlowTraversal, mut_base};
use flow::{self, BaseFlow, EarlyAbsolutePositionInfo, Flow, FlowClass, ForceNonfloatedFlag};
use flow_ref;
use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, HAS_LAYER};
use fragment::{SpecificFragmentInfo};
use gfx::display_list::{ClippingRegion, DisplayList};
use incremental::{REFLOW, REFLOW_OUT_OF_FLOW};
use layout_debug;
use layout_task::DISPLAY_PORT_SIZE_FACTOR;
use model::{CollapsibleMargins, MaybeAuto, specified, specified_or_none};
use model::{IntrinsicISizes, MarginCollapseInfo};
use msg::compositor_msg::{LayerId, LayerType};
use rustc_serialize::{Encodable, Encoder};
use std::cmp::{max, min};
use std::fmt;
use std::sync::Arc;
use style::computed_values::{border_collapse, box_sizing, display, float, overflow_x, overflow_y};
use style::computed_values::{position, text_align, transform, transform_style};
use style::properties::ComputedValues;
use style::values::computed::{LengthOrNone, LengthOrPercentageOrNone};
use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrAuto};
use util::geometry::{Au, MAX_AU, MAX_RECT};
use util::logical_geometry::{LogicalPoint, LogicalRect, LogicalSize, WritingMode};
use util::opts;
use wrapper::PseudoElementType;
/// Information specific to floated blocks.
#[derive(Clone, RustcEncodable)]
pub struct FloatedBlockInfo {
/// The amount of inline size that is available for the float.
pub containing_inline_size: Au,
/// The float ceiling, relative to `BaseFlow::position::cur_b` (i.e. the top part of the border
/// box).
pub float_ceiling: Au,
/// Left or right?
pub float_kind: FloatKind,
}
impl FloatedBlockInfo {
pub fn new(float_kind: FloatKind) -> FloatedBlockInfo {
FloatedBlockInfo {
containing_inline_size: Au(0),
float_ceiling: Au(0),
float_kind: float_kind,
}
}
}
/// The solutions for the block-size-and-margins constraint equation.
#[derive(Copy, Clone)]
struct BSizeConstraintSolution {
block_start: Au,
block_size: Au,
margin_block_start: Au,
margin_block_end: Au
}
impl BSizeConstraintSolution {
fn new(block_start: Au,
block_size: Au,
margin_block_start: Au,
margin_block_end: Au)
-> BSizeConstraintSolution {
BSizeConstraintSolution {
block_start: block_start,
block_size: block_size,
margin_block_start: margin_block_start,
margin_block_end: margin_block_end,
}
}
/// Solve the vertical constraint equation for absolute non-replaced elements.
///
/// CSS Section 10.6.4
/// Constraint equation:
/// block-start + block-end + block-size + margin-block-start + margin-block-end
/// = absolute containing block block-size - (vertical padding and border)
/// [aka available_block-size]
///
/// Return the solution for the equation.
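    ///
    /// Worked example (hypothetical values): with `available_block_size` = 100px,
    /// `block-start: 10px`, `block-end: 10px`, `block-size: 60px`, and both block
    /// margins `auto`, the equation leaves 100 - 10 - 10 - 60 = 20px of slack,
    /// which is split evenly to give each margin 10px.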
fn solve_vertical_constraints_abs_nonreplaced(block_size: MaybeAuto,
block_start_margin: MaybeAuto,
block_end_margin: MaybeAuto,
block_start: MaybeAuto,
block_end: MaybeAuto,
content_block_size: Au,
available_block_size: Au)
-> BSizeConstraintSolution {
let (block_start, block_size, margin_block_start, margin_block_end) =
match (block_start, block_end, block_size) {
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
// Now it is the same situation as block-start Specified and block-end
// and block-size Auto.
let block_size = content_block_size;
// Use a dummy value for `block_start`, since it has the static position.
(Au(0), block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Specified(block_start),
MaybeAuto::Specified(block_end),
MaybeAuto::Specified(block_size)) => {
match (block_start_margin, block_end_margin) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let total_margin_val =
available_block_size - block_start - block_end - block_size;
(block_start,
block_size,
total_margin_val.scale_by(0.5),
total_margin_val.scale_by(0.5))
}
(MaybeAuto::Specified(margin_block_start), MaybeAuto::Auto) => {
let sum = block_start + block_end + block_size + margin_block_start;
(block_start,
block_size,
margin_block_start,
available_block_size - sum)
}
(MaybeAuto::Auto, MaybeAuto::Specified(margin_block_end)) => {
let sum = block_start + block_end + block_size + margin_block_end;
(block_start, block_size, available_block_size - sum, margin_block_end)
}
(MaybeAuto::Specified(margin_block_start),
MaybeAuto::Specified(margin_block_end)) => {
// Values are over-constrained. Ignore value for 'block-end'.
(block_start, block_size, margin_block_start, margin_block_end)
}
}
}
// For the rest of the cases, auto values for margin are set to 0
// If only one is Auto, solve for it
(MaybeAuto::Auto,
MaybeAuto::Specified(block_end),
MaybeAuto::Specified(block_size)) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
let sum = block_end + block_size + margin_block_start + margin_block_end;
(available_block_size - sum, block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Specified(block_start),
MaybeAuto::Auto,
MaybeAuto::Specified(block_size)) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
(block_start, block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Specified(block_start),
MaybeAuto::Specified(block_end),
MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
let sum = block_start + block_end + margin_block_start + margin_block_end;
(block_start, available_block_size - sum, margin_block_start, margin_block_end)
}
// If block-size is auto, then block-size is content block-size. Solve for the
// non-auto value.
(MaybeAuto::Specified(block_start), MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
let block_size = content_block_size;
(block_start, block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Auto, MaybeAuto::Specified(block_end), MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
let block_size = content_block_size;
let sum = block_end + block_size + margin_block_start + margin_block_end;
(available_block_size - sum, block_size, margin_block_start, margin_block_end)
}<|fim▁hole|> let margin_block_end = block_end_margin.specified_or_zero();
// Use a dummy value for `block_start`, since it has the static position.
(Au(0), block_size, margin_block_start, margin_block_end)
}
};
BSizeConstraintSolution::new(block_start, block_size, margin_block_start, margin_block_end)
}
/// Solve the vertical constraint equation for absolute replaced elements.
///
/// Assumption: The used value for block-size has already been calculated.
///
/// CSS Section 10.6.5
/// Constraint equation:
/// block-start + block-end + block-size + margin-block-start + margin-block-end
/// = absolute containing block block-size - (vertical padding and border)
/// [aka available block-size]
///
/// Return the solution for the equation.
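    ///
    /// Worked example (hypothetical values): for a replaced element whose used
    /// `block-size` is 40px, with `block-start: 20px`, `block-end: auto`, and
    /// both margins 0, `block-start` stays at 20px and `block-end` simply takes
    /// whatever space remains in the containing block.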
fn solve_vertical_constraints_abs_replaced(block_size: Au,
block_start_margin: MaybeAuto,
block_end_margin: MaybeAuto,
block_start: MaybeAuto,
block_end: MaybeAuto,
_: Au,
available_block_size: Au)
-> BSizeConstraintSolution {
let (block_start, block_size, margin_block_start, margin_block_end) =
match (block_start, block_end) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
// Use a dummy value for `block_start`, since it has the static position.
(Au(0), block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Specified(block_start), MaybeAuto::Specified(block_end)) => {
match (block_start_margin, block_end_margin) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let total_margin_val = available_block_size - block_start - block_end -
block_size;
(block_start,
block_size,
total_margin_val.scale_by(0.5),
total_margin_val.scale_by(0.5))
}
(MaybeAuto::Specified(margin_block_start), MaybeAuto::Auto) => {
let sum = block_start + block_end + block_size + margin_block_start;
(block_start,
block_size,
margin_block_start,
available_block_size - sum)
}
(MaybeAuto::Auto, MaybeAuto::Specified(margin_block_end)) => {
let sum = block_start + block_end + block_size + margin_block_end;
(block_start, block_size, available_block_size - sum, margin_block_end)
}
(MaybeAuto::Specified(margin_block_start),
MaybeAuto::Specified(margin_block_end)) => {
// Values are over-constrained. Ignore value for 'block-end'.
(block_start, block_size, margin_block_start, margin_block_end)
}
}
}
// If only one is Auto, solve for it
(MaybeAuto::Auto, MaybeAuto::Specified(block_end)) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
let sum = block_end + block_size + margin_block_start + margin_block_end;
(available_block_size - sum, block_size, margin_block_start, margin_block_end)
}
(MaybeAuto::Specified(block_start), MaybeAuto::Auto) => {
let margin_block_start = block_start_margin.specified_or_zero();
let margin_block_end = block_end_margin.specified_or_zero();
(block_start, block_size, margin_block_start, margin_block_end)
}
};
BSizeConstraintSolution::new(block_start, block_size, margin_block_start, margin_block_end)
}
}
/// Performs block-size calculations potentially multiple times, taking
/// (assuming a horizontal writing mode) `height`, `min-height`, and `max-height`
/// into account. After each call to `next()`, the caller must store the current
/// calculated value of `height` in the `candidate_value` field.
///
/// See CSS 2.1 § 10.7.
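///
/// A sketch of the intended driving loop (mirroring the way this iterator is
/// used later in this file; the variable names are illustrative):
///
/// ```ignore
/// let mut iterator = CandidateBSizeIterator::new(&fragment, container_block_size);
/// while let Some(candidate) = iterator.next() {
///     iterator.candidate_value = match candidate {
///         MaybeAuto::Auto => content_block_size,
///         MaybeAuto::Specified(value) => value,
///     };
/// }
/// let used_block_size = iterator.candidate_value;
/// ```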
pub struct CandidateBSizeIterator {
block_size: MaybeAuto,
max_block_size: Option<Au>,
min_block_size: Au,
pub candidate_value: Au,
status: CandidateBSizeIteratorStatus,
}
impl CandidateBSizeIterator {
    /// Creates a new candidate block-size iterator. `block_container_block_size` is `None` if the block-size
/// of the block container has not been determined yet. It will always be `Some` in the case of
/// absolutely-positioned containing blocks.
pub fn new(fragment: &Fragment, block_container_block_size: Option<Au>)
-> CandidateBSizeIterator {
        // Per CSS 2.1 § 10.7 (assuming a horizontal writing mode),
// percentages in `min-height` and `max-height` refer to the height of
// the containing block.
// If that is not determined yet by the time we need to resolve
// `min-height` and `max-height`, percentage values are ignored.
let block_size = match (fragment.style.content_block_size(), block_container_block_size) {
(LengthOrPercentageOrAuto::Percentage(percent), Some(block_container_block_size)) => {
MaybeAuto::Specified(block_container_block_size.scale_by(percent))
}
(LengthOrPercentageOrAuto::Calc(calc), Some(block_container_block_size)) => {
MaybeAuto::Specified(calc.length() + block_container_block_size.scale_by(calc.percentage()))
}
(LengthOrPercentageOrAuto::Percentage(_), None) |
(LengthOrPercentageOrAuto::Auto, _) |
(LengthOrPercentageOrAuto::Calc(_), _) => MaybeAuto::Auto,
(LengthOrPercentageOrAuto::Length(length), _) => MaybeAuto::Specified(length),
};
let max_block_size = match (fragment.style.max_block_size(), block_container_block_size) {
(LengthOrPercentageOrNone::Percentage(percent), Some(block_container_block_size)) => {
Some(block_container_block_size.scale_by(percent))
}
(LengthOrPercentageOrNone::Percentage(_), None) |
(LengthOrPercentageOrNone::None, _) => None,
(LengthOrPercentageOrNone::Length(length), _) => Some(length),
};
let min_block_size = match (fragment.style.min_block_size(), block_container_block_size) {
(LengthOrPercentage::Percentage(percent), Some(block_container_block_size)) => {
block_container_block_size.scale_by(percent)
}
(LengthOrPercentage::Calc(calc), Some(block_container_block_size)) => {
calc.length() + block_container_block_size.scale_by(calc.percentage())
}
(LengthOrPercentage::Calc(calc), None) => calc.length(),
(LengthOrPercentage::Percentage(_), None) => Au(0),
(LengthOrPercentage::Length(length), _) => length,
};
// If the style includes `box-sizing: border-box`, subtract the border and padding.
let adjustment_for_box_sizing = match fragment.style.get_box().box_sizing {
box_sizing::T::border_box => fragment.border_padding.block_start_end(),
box_sizing::T::content_box => Au(0),
};
return CandidateBSizeIterator {
block_size: block_size.map(|size| adjust(size, adjustment_for_box_sizing)),
max_block_size: max_block_size.map(|size| adjust(size, adjustment_for_box_sizing)),
min_block_size: adjust(min_block_size, adjustment_for_box_sizing),
candidate_value: Au(0),
status: CandidateBSizeIteratorStatus::Initial,
};
fn adjust(size: Au, delta: Au) -> Au {
max(size - delta, Au(0))
}
}
}
impl Iterator for CandidateBSizeIterator {
type Item = MaybeAuto;
fn next(&mut self) -> Option<MaybeAuto> {
self.status = match self.status {
CandidateBSizeIteratorStatus::Initial => CandidateBSizeIteratorStatus::Trying,
CandidateBSizeIteratorStatus::Trying => {
match self.max_block_size {
Some(max_block_size) if self.candidate_value > max_block_size => {
CandidateBSizeIteratorStatus::TryingMax
}
_ if self.candidate_value < self.min_block_size => {
CandidateBSizeIteratorStatus::TryingMin
}
_ => CandidateBSizeIteratorStatus::Found,
}
}
CandidateBSizeIteratorStatus::TryingMax => {
if self.candidate_value < self.min_block_size {
CandidateBSizeIteratorStatus::TryingMin
} else {
CandidateBSizeIteratorStatus::Found
}
}
CandidateBSizeIteratorStatus::TryingMin | CandidateBSizeIteratorStatus::Found => {
CandidateBSizeIteratorStatus::Found
}
};
match self.status {
CandidateBSizeIteratorStatus::Trying => Some(self.block_size),
CandidateBSizeIteratorStatus::TryingMax => {
Some(MaybeAuto::Specified(self.max_block_size.unwrap()))
}
CandidateBSizeIteratorStatus::TryingMin => {
Some(MaybeAuto::Specified(self.min_block_size))
}
CandidateBSizeIteratorStatus::Found => None,
CandidateBSizeIteratorStatus::Initial => panic!(),
}
}
}
enum CandidateBSizeIteratorStatus {
Initial,
Trying,
TryingMax,
TryingMin,
Found,
}
// A helper function used in block-size calculation.
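// Advancing `cur_b` by `delta` moves the flow's block-direction origin down, so
// the floats are translated by `-delta` to keep them at the same absolute
// position relative to the new origin.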
fn translate_including_floats(cur_b: &mut Au, delta: Au, floats: &mut Floats) {
*cur_b = *cur_b + delta;
let writing_mode = floats.writing_mode;
floats.translate(LogicalSize::new(writing_mode, Au(0), -delta));
}
/// The real assign-block-sizes traversal for flows with position 'absolute'.
///
/// This is a traversal of an Absolute Flow tree.
/// - Relatively positioned flows and the Root flow start new Absolute flow trees.
/// - The kids of a flow in this tree will be the flows for which it is the
/// absolute Containing Block.
/// - Thus, leaf nodes and inner non-root nodes are all Absolute Flows.
///
/// A Flow tree can have several Absolute Flow trees (depending on the number
/// of relatively positioned flows it has).
///
/// Note that flows with position 'fixed' just form a flat list as they all
/// have the Root flow as their CB.
pub struct AbsoluteAssignBSizesTraversal<'a>(pub &'a LayoutContext<'a>);
impl<'a> PreorderFlowTraversal for AbsoluteAssignBSizesTraversal<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
{
// The root of the absolute flow tree is definitely not absolutely
// positioned. Nothing to process here.
let flow: &Flow = flow;
if flow.contains_roots_of_absolute_flow_tree() {
return;
}
if !flow.is_block_like() {
return
}
}
let block = flow.as_mut_block();
debug_assert!(block.base.flags.contains(IS_ABSOLUTELY_POSITIONED));
if !block.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) {
return
}
let AbsoluteAssignBSizesTraversal(ref layout_context) = *self;
block.calculate_absolute_block_size_and_margins(*layout_context);
}
}
/// The store-overflow traversal particular to absolute flows.
///
/// Propagate overflow up the Absolute flow tree and update overflow up to and
/// not including the root of the Absolute flow tree.
/// After that, it is up to the normal store-overflow traversal to propagate
/// it further up.
pub struct AbsoluteStoreOverflowTraversal<'a>{
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for AbsoluteStoreOverflowTraversal<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
{
// This will be taken care of by the normal store-overflow traversal.
let flow: &Flow = flow;
if flow.contains_roots_of_absolute_flow_tree() {
return;
}
}
flow.mutate_fragments(&mut |f: &mut Fragment| {
match f.specific {
SpecificFragmentInfo::InlineBlock(ref mut info) => {
let block = flow_ref::deref_mut(&mut info.flow_ref);
(block.as_mut_block() as &mut Flow).early_store_overflow(self.layout_context);
}
SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
let block = flow_ref::deref_mut(&mut info.flow_ref);
(block.as_mut_block() as &mut Flow).early_store_overflow(self.layout_context);
}
_ => (),
}
});
flow.early_store_overflow(self.layout_context);
}
}
pub enum BlockType {
Replaced,
NonReplaced,
AbsoluteReplaced,
AbsoluteNonReplaced,
FloatReplaced,
FloatNonReplaced,
InlineBlockReplaced,
InlineBlockNonReplaced,
}
#[derive(Clone, PartialEq)]
pub enum MarginsMayCollapseFlag {
MarginsMayCollapse,
MarginsMayNotCollapse,
}
#[derive(PartialEq)]
enum FormattingContextType {
None,
Block,
Other,
}
// Propagates the `layers_needed_for_descendants` flag appropriately from a child. This is called
// as part of block-size assignment.
//
// If any fixed descendants of kids are present, this kid needs a layer.
//
// FIXME(#2006, pcwalton): This is too layer-happy. Like WebKit, we shouldn't do this unless
// the positioned descendants are actually on top of the fixed kids.
//
// TODO(#1244, #2007, pcwalton): Do this for CSS transforms and opacity too, at least if they're
// animating.
pub fn propagate_layer_flag_from_child(layers_needed_for_descendants: &mut bool, kid: &mut Flow) {
if kid.is_absolute_containing_block() {
let kid_base = flow::mut_base(kid);
if kid_base.flags.contains(NEEDS_LAYER) {
*layers_needed_for_descendants = true
}
} else {
let kid_base = flow::mut_base(kid);
if kid_base.flags.contains(LAYERS_NEEDED_FOR_DESCENDANTS) {
*layers_needed_for_descendants = true
}
}
}
// A block formatting context.
#[derive(RustcEncodable)]
pub struct BlockFlow {
/// Data common to all flows.
pub base: BaseFlow,
/// The associated fragment.
pub fragment: Fragment,
/// The sum of the inline-sizes of all logically left floats that precede this block. This is
/// used to speculatively lay out block formatting contexts.
inline_size_of_preceding_left_floats: Au,
/// The sum of the inline-sizes of all logically right floats that precede this block. This is
/// used to speculatively lay out block formatting contexts.
inline_size_of_preceding_right_floats: Au,
/// Additional floating flow members.
pub float: Option<Box<FloatedBlockInfo>>,
/// Various flags.
pub flags: BlockFlowFlags,
}
bitflags! {
flags BlockFlowFlags: u8 {
#[doc = "If this is set, then this block flow is the root flow."]
const IS_ROOT = 0x01,
}
}
impl Encodable for BlockFlowFlags {
fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
self.bits().encode(e)
}
}
impl BlockFlow {
pub fn from_fragment(fragment: Fragment, float_kind: Option<FloatKind>) -> BlockFlow {
let writing_mode = fragment.style().writing_mode;
BlockFlow {
base: BaseFlow::new(Some(fragment.style()), writing_mode, match float_kind {
Some(_) => ForceNonfloatedFlag::FloatIfNecessary,
None => ForceNonfloatedFlag::ForceNonfloated,
}),
fragment: fragment,
inline_size_of_preceding_left_floats: Au(0),
inline_size_of_preceding_right_floats: Au(0),
float: float_kind.map(|kind| box FloatedBlockInfo::new(kind)),
flags: BlockFlowFlags::empty(),
}
}
/// Return the type of this block.
///
/// This determines the algorithm used to calculate inline-size, block-size, and the
/// relevant margins for this Block.
pub fn block_type(&self) -> BlockType {
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
if self.is_replaced_content() {
BlockType::AbsoluteReplaced
} else {
BlockType::AbsoluteNonReplaced
}
} else if self.base.flags.is_float() {
if self.is_replaced_content() {
BlockType::FloatReplaced
} else {
BlockType::FloatNonReplaced
}
} else if self.is_inline_block() {
if self.is_replaced_content() {
BlockType::InlineBlockReplaced
} else {
BlockType::InlineBlockNonReplaced
}
} else {
if self.is_replaced_content() {
BlockType::Replaced
} else {
BlockType::NonReplaced
}
}
}
pub fn transform_requires_layer(&self) -> bool {
// Check if the transform matrix is 2D or 3D
if let Some(ref transform_list) = self.fragment.style().get_effects().transform.0 {
for transform in transform_list {
match *transform {
transform::ComputedOperation::Perspective(..) => {
return true;
}
transform::ComputedOperation::Matrix(m) => {
// See http://dev.w3.org/csswg/css-transforms/#2d-matrix
if m.m31 != 0.0 || m.m32 != 0.0 ||
m.m13 != 0.0 || m.m23 != 0.0 ||
m.m43 != 0.0 || m.m14 != 0.0 ||
m.m24 != 0.0 || m.m34 != 0.0 ||
m.m33 != 1.0 || m.m44 != 1.0 {
return true;
}
}
_ => {}
}
}
}
// Neither perspective nor transform present
false
}
/// Compute the actual inline size and position for this block.
pub fn compute_used_inline_size(&mut self,
layout_context: &LayoutContext,
containing_block_inline_size: Au) {
let block_type = self.block_type();
match block_type {
BlockType::AbsoluteReplaced => {
let inline_size_computer = AbsoluteReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::AbsoluteNonReplaced => {
let inline_size_computer = AbsoluteNonReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::FloatReplaced => {
let inline_size_computer = FloatReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::FloatNonReplaced => {
let inline_size_computer = FloatNonReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::InlineBlockReplaced => {
let inline_size_computer = InlineBlockReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::InlineBlockNonReplaced => {
let inline_size_computer = InlineBlockNonReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::Replaced => {
let inline_size_computer = BlockReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
BlockType::NonReplaced => {
let inline_size_computer = BlockNonReplaced;
inline_size_computer.compute_used_inline_size(self,
layout_context,
containing_block_inline_size);
}
}
}
/// Return this flow's fragment.
pub fn fragment(&mut self) -> &mut Fragment {
&mut self.fragment
}
/// Return the size of the containing block for the given immediate absolute descendant of this
/// flow.
///
/// Right now, this only gets the containing block size for absolutely positioned elements.
/// Note: We assume this is called in a top-down traversal, so it is ok to reference the CB.
#[inline]
pub fn containing_block_size(&self, viewport_size: &Size2D<Au>, descendant: OpaqueFlow)
-> LogicalSize<Au> {
debug_assert!(self.base.flags.contains(IS_ABSOLUTELY_POSITIONED));
if self.is_fixed() {
// Initial containing block is the CB for the root
LogicalSize::from_physical(self.base.writing_mode, *viewport_size)
} else {
self.base.absolute_cb.generated_containing_block_size(descendant)
}
}
/// Return true if this has a replaced fragment.
///
/// Text, Images, Inline Block and Canvas
/// (https://html.spec.whatwg.org/multipage/#replaced-elements) fragments are considered as
/// replaced fragments.
fn is_replaced_content(&self) -> bool {
match self.fragment.specific {
SpecificFragmentInfo::ScannedText(_) |
SpecificFragmentInfo::Image(_) |
SpecificFragmentInfo::Canvas(_) |
SpecificFragmentInfo::InlineBlock(_) => true,
_ => false,
}
}
/// Return shrink-to-fit inline-size.
///
/// This is where we use the preferred inline-sizes and minimum inline-sizes
/// calculated in the bubble-inline-sizes traversal.
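    ///
    /// Worked example (hypothetical values): with a minimum inline-size of 50px,
    /// a preferred inline-size of 200px, and 120px of available inline-size, the
    /// result is `min(200, max(50, 120))` = 120px; the box shrinks to fit the
    /// available space but never below its minimum.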
pub fn get_shrink_to_fit_inline_size(&self, available_inline_size: Au) -> Au {
let content_intrinsic_inline_sizes = self.content_intrinsic_inline_sizes();
min(content_intrinsic_inline_sizes.preferred_inline_size,
max(content_intrinsic_inline_sizes.minimum_inline_size, available_inline_size))
}
/// If this is the root flow, shifts all kids down and adjusts our size to account for
/// root flow margins, which should never be collapsed according to CSS § 8.3.1.
///
/// TODO(#2017, pcwalton): This is somewhat inefficient (traverses kids twice); can we do
/// better?
fn adjust_fragments_for_collapsed_margins_if_root(&mut self) {
if !self.is_root() {
return
}
let (block_start_margin_value, block_end_margin_value) =
match self.base.collapsible_margins {
CollapsibleMargins::CollapseThrough(_) => {
panic!("Margins unexpectedly collapsed through root flow.")
}
CollapsibleMargins::Collapse(block_start_margin, block_end_margin) => {
(block_start_margin.collapse(), block_end_margin.collapse())
}
CollapsibleMargins::None(block_start, block_end) => (block_start, block_end),
};
// Shift all kids down (or up, if margins are negative) if necessary.
if block_start_margin_value != Au(0) {
for kid in self.base.child_iter() {
let kid_base = flow::mut_base(kid);
kid_base.position.start.b = kid_base.position.start.b + block_start_margin_value
}
}
self.base.position.size.block = self.base.position.size.block + block_start_margin_value +
block_end_margin_value;
self.fragment.border_box.size.block = self.fragment.border_box.size.block + block_start_margin_value +
block_end_margin_value;
}
/// Assign block-size for current flow.
///
/// * Collapse margins for flow's children and set in-flow child flows' block offsets now that
/// we know their block-sizes.
/// * Calculate and set the block-size of the current flow.
/// * Calculate block-size, vertical margins, and block offset for the flow's box using CSS §
/// 10.6.7.
///
/// For absolute flows, we store the calculated content block-size for the flow. We defer the
/// calculation of the other values until a later traversal.
///
/// `inline(always)` because this is only ever called by in-order or non-in-order top-level
/// methods.
#[inline(always)]
pub fn assign_block_size_block_base<'a>(&mut self,
layout_context: &'a LayoutContext<'a>,
margins_may_collapse: MarginsMayCollapseFlag) {
let _scope = layout_debug_scope!("assign_block_size_block_base {:x}",
self.base.debug_id());
if self.base.restyle_damage.contains(REFLOW) {
self.determine_if_layer_needed();
// Our current border-box position.
let mut cur_b = Au(0);
// Absolute positioning establishes a block formatting context. Don't propagate floats
// in or out. (But do propagate them between kids.)
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) ||
margins_may_collapse != MarginsMayCollapseFlag::MarginsMayCollapse {
self.base.floats = Floats::new(self.fragment.style.writing_mode);
}
let mut margin_collapse_info = MarginCollapseInfo::new();
let writing_mode = self.base.floats.writing_mode;
self.base.floats.translate(LogicalSize::new(
writing_mode, -self.fragment.inline_start_offset(), Au(0)));
// The sum of our block-start border and block-start padding.
let block_start_offset = self.fragment.border_padding.block_start;
translate_including_floats(&mut cur_b, block_start_offset, &mut self.base.floats);
let can_collapse_block_start_margin_with_kids =
margins_may_collapse == MarginsMayCollapseFlag::MarginsMayCollapse &&
!self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.fragment.border_padding.block_start == Au(0);
margin_collapse_info.initialize_block_start_margin(
&self.fragment,
can_collapse_block_start_margin_with_kids);
// At this point, `cur_b` is at the content edge of our box. Now iterate over children.
let mut floats = self.base.floats.clone();
let mut layers_needed_for_descendants = false;
let thread_id = self.base.thread_id;
for kid in self.base.child_iter() {
if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) {
// Assume that the *hypothetical box* for an absolute flow starts immediately
// after the block-end border edge of the previous flow.
if flow::base(kid).flags.contains(BLOCK_POSITION_IS_STATIC) {
flow::mut_base(kid).position.start.b = cur_b +
flow::base(kid).collapsible_margins
.block_start_margin_for_noncollapsible_context()
}
kid.place_float_if_applicable(layout_context);
if !flow::base(kid).flags.is_float() {
kid.assign_block_size_for_inorder_child_if_necessary(layout_context,
thread_id);
}
propagate_layer_flag_from_child(&mut layers_needed_for_descendants, kid);
// Skip the collapsing and float processing for absolute flow kids and continue
// with the next flow.
continue
}
// Assign block-size now for the child if it was impacted by floats and we couldn't
// before.
flow::mut_base(kid).floats = floats.clone();
if flow::base(kid).flags.is_float() {
flow::mut_base(kid).position.start.b = cur_b;
{
let kid_block = kid.as_mut_block();
kid_block.float.as_mut().unwrap().float_ceiling =
margin_collapse_info.current_float_ceiling();
}
propagate_layer_flag_from_child(&mut layers_needed_for_descendants, kid);
kid.place_float_if_applicable(layout_context);
let kid_base = flow::mut_base(kid);
floats = kid_base.floats.clone();
continue
}
                // If we have clearance, assume there are no floats coming in.
//
// FIXME(#2008, pcwalton): This could be wrong if we have `clear: left` or `clear:
// right` and there are still floats to impact, of course. But this gets
// complicated with margin collapse. Possibly the right thing to do is to lay out
// the block again in this rare case. (Note that WebKit can lay blocks out twice;
// this may be related, although I haven't looked into it closely.)
if flow::base(kid).flags.clears_floats() {
flow::mut_base(kid).floats = Floats::new(self.fragment.style.writing_mode)
}
// Lay the child out if this was an in-order traversal.
let need_to_process_child_floats =
kid.assign_block_size_for_inorder_child_if_necessary(layout_context,
thread_id);
// Mark flows for layerization if necessary to handle painting order correctly.
propagate_layer_flag_from_child(&mut layers_needed_for_descendants, kid);
// Handle any (possibly collapsed) top margin.
let delta = margin_collapse_info.advance_block_start_margin(
&flow::base(kid).collapsible_margins);
translate_including_floats(&mut cur_b, delta, &mut floats);
// Clear past the floats that came in, if necessary.
let clearance = match (flow::base(kid).flags.contains(CLEARS_LEFT),
flow::base(kid).flags.contains(CLEARS_RIGHT)) {
(false, false) => Au(0),
(true, false) => floats.clearance(ClearType::Left),
(false, true) => floats.clearance(ClearType::Right),
(true, true) => floats.clearance(ClearType::Both),
};
translate_including_floats(&mut cur_b, clearance, &mut floats);
// At this point, `cur_b` is at the border edge of the child.
flow::mut_base(kid).position.start.b = cur_b;
// Now pull out the child's outgoing floats. We didn't do this immediately after
// the `assign_block_size_for_inorder_child_if_necessary` call because clearance on
// a block operates on the floats that come *in*, not the floats that go *out*.
if need_to_process_child_floats {
floats = flow::mut_base(kid).floats.clone()
}
// Move past the child's border box. Do not use the `translate_including_floats`
// function here because the child has already translated floats past its border
// box.
let kid_base = flow::mut_base(kid);
cur_b = cur_b + kid_base.position.size.block;
// Handle any (possibly collapsed) block-end margin.
let delta =
margin_collapse_info.advance_block_end_margin(&kid_base.collapsible_margins);
translate_including_floats(&mut cur_b, delta, &mut floats);
}
// Mark ourselves for layerization if that will be necessary to paint in the proper
// order (CSS 2.1, Appendix E).
self.base.flags.set(LAYERS_NEEDED_FOR_DESCENDANTS, layers_needed_for_descendants);
// Add in our block-end margin and compute our collapsible margins.
let can_collapse_block_end_margin_with_kids =
margins_may_collapse == MarginsMayCollapseFlag::MarginsMayCollapse &&
!self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.fragment.border_padding.block_end == Au(0);
let (collapsible_margins, delta) =
margin_collapse_info.finish_and_compute_collapsible_margins(
&self.fragment,
self.base.block_container_explicit_block_size,
can_collapse_block_end_margin_with_kids);
self.base.collapsible_margins = collapsible_margins;
translate_including_floats(&mut cur_b, delta, &mut floats);
// FIXME(#2003, pcwalton): The max is taken here so that you can scroll the page, but
// this is not correct behavior according to CSS 2.1 § 10.5. Instead I think we should
// treat the root element as having `overflow: scroll` and use the layers-based
// scrolling infrastructure to make it scrollable.
let mut block_size = cur_b - block_start_offset;
let is_root = self.is_root();
if is_root {
let screen_size = LogicalSize::from_physical(self.fragment.style.writing_mode,
layout_context.shared.screen_size);
block_size = max(screen_size.block, block_size)
}
if is_root || self.formatting_context_type() != FormattingContextType::None ||
self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
// The content block-size includes all the floats per CSS 2.1 § 10.6.7. The easiest
// way to handle this is to just treat it as clearance.
block_size = block_size + floats.clearance(ClearType::Both);
}
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
// Store the content block-size for use in calculating the absolute flow's
// dimensions later.
//
                // FIXME(pcwalton): This does not look idempotent. Is it?
self.fragment.border_box.size.block = block_size;
}
// Write in the size of the relative containing block for children. (This information
// is also needed to handle RTL.)
for kid in self.base.child_iter() {
flow::mut_base(kid).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: self.fragment.content_box().size,
relative_containing_block_mode: self.fragment.style().writing_mode,
};
kid.late_store_overflow(layout_context)
}
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
return
}
// Compute any explicitly-specified block size.
// Can't use `for` because we assign to `candidate_block_size_iterator.candidate_value`.
let mut candidate_block_size_iterator = CandidateBSizeIterator::new(
&self.fragment,
self.base.block_container_explicit_block_size);
loop {
match candidate_block_size_iterator.next() {
Some(candidate_block_size) => {
candidate_block_size_iterator.candidate_value =
match candidate_block_size {
MaybeAuto::Auto => block_size,
MaybeAuto::Specified(value) => value
}
}
None => break,
}
}
// Adjust `cur_b` as necessary to account for the explicitly-specified block-size.
block_size = candidate_block_size_iterator.candidate_value;
let delta = block_size - (cur_b - block_start_offset);
translate_including_floats(&mut cur_b, delta, &mut floats);
// Take border and padding into account.
let block_end_offset = self.fragment.border_padding.block_end;
translate_including_floats(&mut cur_b, block_end_offset, &mut floats);
// Now that `cur_b` is at the block-end of the border box, compute the final border box
// position.
self.fragment.border_box.size.block = cur_b;
self.fragment.border_box.start.b = Au(0);
if !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
self.base.position.size.block = cur_b;
}
// Store the current set of floats in the flow so that flows that come later in the
// document can access them.
self.base.floats = floats.clone();
self.adjust_fragments_for_collapsed_margins_if_root();
} else {
// We don't need to reflow, but we still need to perform in-order traversals if
// necessary.
let thread_id = self.base.thread_id;
for kid in self.base.child_iter() {
kid.assign_block_size_for_inorder_child_if_necessary(layout_context, thread_id);
}
}
if (&*self as &Flow).contains_roots_of_absolute_flow_tree() {
// Assign block-sizes for all flows in this absolute flow tree.
// This is preorder because the block-size of an absolute flow may depend on
// the block-size of its containing block, which may also be an absolute flow.
(&mut *self as &mut Flow).traverse_preorder_absolute_flows(
&mut AbsoluteAssignBSizesTraversal(layout_context));
// Store overflow for all absolute descendants.
(&mut *self as &mut Flow).traverse_postorder_absolute_flows(
&mut AbsoluteStoreOverflowTraversal {
layout_context: layout_context,
});
}
// Don't remove the dirty bits yet if we're absolutely-positioned, since our final size
// has not been calculated yet. (See `calculate_absolute_block_size_and_margins` for that.)
// Also don't remove the dirty bits if we're a block formatting context since our inline
// size has not yet been computed. (See `assign_inline_position_for_formatting_context()`.)
if (self.base.flags.is_float() ||
self.formatting_context_type() == FormattingContextType::None) &&
!self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
}
}
/// Add placement information about current float flow for use by the parent.
///
/// Also, use information given by parent about other floats to find out our relative position.
///
/// This does not give any information about any float descendants because they do not affect
/// elements outside of the subtree rooted at this float.
///
/// This function is called on a kid flow by a parent. Therefore, `assign_block_size_float` was
/// already called on this kid flow by the traversal function. So, the values used are
/// well-defined.
pub fn place_float(&mut self) {
let block_size = self.fragment.border_box.size.block;
let clearance = match self.fragment.clear() {
None => Au(0),
Some(clear) => self.base.floats.clearance(clear),
};
let float_info: FloatedBlockInfo = (**self.float.as_ref().unwrap()).clone();
// Our `position` field accounts for positive margins, but not negative margins. (See
// calculation of `extra_inline_size_from_margin` below.) Negative margins must be taken
// into account for float placement, however. So we add them in here.
let inline_size_for_float_placement = self.base.position.size.inline +
min(Au(0), self.fragment.margin.inline_start_end());
let info = PlacementInfo {
size: LogicalSize::new(
self.fragment.style.writing_mode,
inline_size_for_float_placement,
block_size + self.fragment.margin.block_start_end())
.convert(self.fragment.style.writing_mode, self.base.floats.writing_mode),
ceiling: clearance + float_info.float_ceiling,
max_inline_size: float_info.containing_inline_size,
kind: float_info.float_kind,
};
// Place the float and return the `Floats` back to the parent flow.
// After, grab the position and use that to set our position.
self.base.floats.add_float(&info);
// FIXME (mbrubeck) Get the correct container size for self.base.floats;
let container_size = Size2D::new(self.base.block_container_inline_size, Au(0));
// Move in from the margin edge, as per CSS 2.1 § 9.5, floats may not overlap anything on
// their margin edges.
let float_offset = self.base.floats.last_float_pos().unwrap()
.convert(self.base.floats.writing_mode,
self.base.writing_mode,
container_size)
.start;
let margin_offset = LogicalPoint::new(self.base.writing_mode,
Au(0),
self.fragment.margin.block_start);
let mut origin = LogicalPoint::new(self.base.writing_mode,
self.base.position.start.i,
self.base.position.start.b);
origin = origin.add_point(&float_offset).add_point(&margin_offset);
self.base.position = LogicalRect::from_point_size(self.base.writing_mode,
origin,
self.base.position.size);
}
pub fn explicit_block_containing_size(&self, layout_context: &LayoutContext) -> Option<Au> {
if self.is_root() || self.is_fixed() {
let screen_size = LogicalSize::from_physical(self.fragment.style.writing_mode,
layout_context.shared.screen_size);
Some(screen_size.block)
} else if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.base.block_container_explicit_block_size.is_none() {
self.base.absolute_cb.explicit_block_containing_size(layout_context)
} else {
self.base.block_container_explicit_block_size
}
}
fn explicit_block_size(&self, containing_block_size: Option<Au>) -> Option<Au> {
let content_block_size = self.fragment.style().content_block_size();
match (content_block_size, containing_block_size) {
(LengthOrPercentageOrAuto::Calc(calc), Some(container_size)) => {
Some(container_size.scale_by(calc.percentage()) + calc.length())
}
(LengthOrPercentageOrAuto::Length(length), _) => Some(length),
(LengthOrPercentageOrAuto::Percentage(percent), Some(container_size)) => {
Some(container_size.scale_by(percent))
}
(LengthOrPercentageOrAuto::Percentage(_), None) |
(LengthOrPercentageOrAuto::Calc(_), None) |
(LengthOrPercentageOrAuto::Auto, None) => {
None
}
(LengthOrPercentageOrAuto::Auto, Some(container_size)) => {
let (block_start, block_end) = {
let position = self.fragment.style().logical_position();
(MaybeAuto::from_style(position.block_start, container_size),
MaybeAuto::from_style(position.block_end, container_size))
};
match (block_start, block_end) {
(MaybeAuto::Specified(block_start), MaybeAuto::Specified(block_end)) => {
let available_block_size = container_size - self.fragment.border_padding.block_start_end();
// Non-auto margin-block-start and margin-block-end values have already been
// calculated during assign-inline-size.
let margin = self.fragment.style().logical_margin();
let margin_block_start = match margin.block_start {
LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto,
_ => MaybeAuto::Specified(self.fragment.margin.block_start)
};
let margin_block_end = match margin.block_end {
LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto,
_ => MaybeAuto::Specified(self.fragment.margin.block_end)
};
let margin_block_start = margin_block_start.specified_or_zero();
let margin_block_end = margin_block_end.specified_or_zero();
let sum = block_start + block_end + margin_block_start + margin_block_end;
Some(available_block_size - sum)
}
(_, _) => {
None
}
}
}
}
}
fn calculate_absolute_block_size_and_margins(&mut self, layout_context: &LayoutContext) {
let opaque_self = OpaqueFlow::from_flow(self);
let containing_block_block_size =
self.containing_block_size(&layout_context.shared.screen_size, opaque_self).block;
// This is the stored content block-size value from assign-block-size
let content_block_size = self.fragment.border_box.size.block;
let mut solution = None;
{
// Non-auto margin-block-start and margin-block-end values have already been
// calculated during assign-inline-size.
let margin = self.fragment.style().logical_margin();
let margin_block_start = match margin.block_start {
LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto,
_ => MaybeAuto::Specified(self.fragment.margin.block_start)
};
let margin_block_end = match margin.block_end {
LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto,
_ => MaybeAuto::Specified(self.fragment.margin.block_end)
};
let block_start;
let block_end;
{
let position = self.fragment.style().logical_position();
block_start = MaybeAuto::from_style(position.block_start,
containing_block_block_size);
block_end = MaybeAuto::from_style(position.block_end, containing_block_block_size);
}
let available_block_size = containing_block_block_size -
self.fragment.border_padding.block_start_end();
if self.is_replaced_content() {
// Calculate used value of block-size just like we do for inline replaced elements.
// TODO: Pass in the containing block block-size when Fragment's
// assign-block-size can handle it correctly.
self.fragment.assign_replaced_block_size_if_necessary(Some(containing_block_block_size));
// TODO: Right now, this content block-size value includes the
// margin because of erroneous block-size calculation in fragment.
// Check this when that has been fixed.
let block_size_used_val = self.fragment.border_box.size.block;
solution = Some(BSizeConstraintSolution::solve_vertical_constraints_abs_replaced(
block_size_used_val,
margin_block_start,
margin_block_end,
block_start,
block_end,
content_block_size,
available_block_size))
} else {
let mut candidate_block_size_iterator =
CandidateBSizeIterator::new(&self.fragment, Some(containing_block_block_size));
// Can't use `for` because we assign to
// `candidate_block_size_iterator.candidate_value`.
loop {
match candidate_block_size_iterator.next() {
Some(block_size_used_val) => {
solution = Some(
BSizeConstraintSolution::solve_vertical_constraints_abs_nonreplaced(
block_size_used_val,
margin_block_start,
margin_block_end,
block_start,
block_end,
content_block_size,
available_block_size));
candidate_block_size_iterator.candidate_value
= solution.unwrap().block_size;
}
None => break,
}
}
}
}
let solution = solution.unwrap();
self.fragment.margin.block_start = solution.margin_block_start;
self.fragment.margin.block_end = solution.margin_block_end;
self.fragment.border_box.start.b = Au(0);
if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) {
self.base.position.start.b = solution.block_start + self.fragment.margin.block_start
}
let block_size = solution.block_size + self.fragment.border_padding.block_start_end();
self.fragment.border_box.size.block = block_size;
self.base.position.size.block = block_size;
self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
}
    /// Compute inline size using the `block_container_inline_size` set by the parent flow.
///
/// This is run in the `AssignISizes` traversal.
fn propagate_and_compute_used_inline_size(&mut self, layout_context: &LayoutContext) {
let containing_block_inline_size = self.base.block_container_inline_size;
self.compute_used_inline_size(layout_context, containing_block_inline_size);
if self.base.flags.is_float() {
self.float.as_mut().unwrap().containing_inline_size = containing_block_inline_size
}
}
/// Assigns the computed inline-start content edge and inline-size to all the children of this
/// block flow. Also computes whether each child will be impacted by floats. The given
/// `callback`, if supplied, will be called once per child; it is currently used to push down
/// column sizes for tables.
///
/// `#[inline(always)]` because this is called only from block or table inline-size assignment
/// and the code for block layout is significantly simpler.
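    ///
    /// As an illustration (hypothetical caller, not from this file): table inline-size
    /// assignment passes a callback that pushes the computed column sizes down into each
    /// row child, while plain block layout passes the no-op closure
    /// `|_, _, _, _, _, _| {}`, as in `assign_inline_sizes` below.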
#[inline(always)]
pub fn propagate_assigned_inline_size_to_children<F>(&mut self,
layout_context: &LayoutContext,
inline_start_content_edge: Au,
inline_end_content_edge: Au,
content_inline_size: Au,
mut callback: F)
where F: FnMut(&mut Flow,
usize,
Au,
WritingMode,
&mut Au,
&mut Au) {
// Keep track of whether floats could impact each child.
let mut inline_start_floats_impact_child =
self.base.flags.contains(IMPACTED_BY_LEFT_FLOATS);
let mut inline_end_floats_impact_child =
self.base.flags.contains(IMPACTED_BY_RIGHT_FLOATS);
let flags = self.base.flags.clone();
// Remember the inline-sizes of the last left and right floats, if there were any. These
// are used for estimating the inline-sizes of block formatting contexts. (We estimate that
// the inline-size of any block formatting context that we see will be based on the
// inline-size of the containing block as well as the last float seen before it in each
// direction.)
let mut inline_size_of_preceding_left_floats = Au(0);
let mut inline_size_of_preceding_right_floats = Au(0);
if self.formatting_context_type() == FormattingContextType::None {
if inline_start_content_edge > Au(0) {
inline_size_of_preceding_left_floats =
max(self.inline_size_of_preceding_left_floats - inline_start_content_edge,
Au(0));
}
if inline_end_content_edge > Au(0) {
inline_size_of_preceding_right_floats =
max(self.inline_size_of_preceding_right_floats - inline_end_content_edge,
Au(0));
}
}
let opaque_self = OpaqueFlow::from_flow(self);
// Calculate non-auto block size to pass to children.
let parent_container_size = self.explicit_block_containing_size(layout_context);
let explicit_content_size = self.explicit_block_size(parent_container_size);
// Calculate containing block inline size.
let containing_block_size = if flags.contains(IS_ABSOLUTELY_POSITIONED) {
self.containing_block_size(&layout_context.shared.screen_size, opaque_self).inline
} else {
content_inline_size
};
// FIXME (mbrubeck): Get correct mode for absolute containing block
let containing_block_mode = self.base.writing_mode;
let mut inline_start_margin_edge = inline_start_content_edge;
let mut inline_end_margin_edge = inline_end_content_edge;
        let mut iterator = self.base.child_iter().enumerate();
while let Some((i, kid)) = iterator.next() {
flow::mut_base(kid).block_container_explicit_block_size = explicit_content_size;
// Determine float impaction, and update the inline size speculations if necessary.
if flow::base(kid).flags.contains(CLEARS_LEFT) {
inline_start_floats_impact_child = false;
inline_size_of_preceding_left_floats = Au(0);
}
if flow::base(kid).flags.contains(CLEARS_RIGHT) {
inline_end_floats_impact_child = false;
inline_size_of_preceding_right_floats = Au(0);
}
// Update the speculated inline size if this child is floated.
match flow::base(kid).flags.float_kind() {
float::T::none => {}
float::T::left => {
inline_size_of_preceding_left_floats = inline_size_of_preceding_left_floats +
flow::base(kid).intrinsic_inline_sizes.preferred_inline_size;
}
float::T::right => {
inline_size_of_preceding_right_floats = inline_size_of_preceding_right_floats +
flow::base(kid).intrinsic_inline_sizes.preferred_inline_size;
}
}
// The inline-start margin edge of the child flow is at our inline-start content edge,
// and its inline-size is our content inline-size.
let kid_mode = flow::base(kid).writing_mode;
{
let kid_base = flow::mut_base(kid);
if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) {
kid_base.position.start.i =
if kid_mode.is_bidi_ltr() == containing_block_mode.is_bidi_ltr() {
inline_start_content_edge
} else {
// The kid's inline 'start' is at the parent's 'end'
inline_end_content_edge
};
}
kid_base.block_container_inline_size = content_inline_size;
kid_base.block_container_writing_mode = containing_block_mode;
}
{
let kid_base = flow::mut_base(kid);
inline_start_floats_impact_child = inline_start_floats_impact_child ||
kid_base.flags.contains(HAS_LEFT_FLOATED_DESCENDANTS);
inline_end_floats_impact_child = inline_end_floats_impact_child ||
kid_base.flags.contains(HAS_RIGHT_FLOATED_DESCENDANTS);
kid_base.flags.set(IMPACTED_BY_LEFT_FLOATS, inline_start_floats_impact_child);
kid_base.flags.set(IMPACTED_BY_RIGHT_FLOATS, inline_end_floats_impact_child);
}
if kid.is_block_flow() {
let kid_block = kid.as_mut_block();
kid_block.inline_size_of_preceding_left_floats =
inline_size_of_preceding_left_floats;
kid_block.inline_size_of_preceding_right_floats =
inline_size_of_preceding_right_floats;
}
// Call the callback to propagate extra inline size information down to the child. This
// is currently used for tables.
callback(kid,
i,
content_inline_size,
containing_block_mode,
&mut inline_start_margin_edge,
&mut inline_end_margin_edge);
            // Per CSS 2.1 § 16.2, text alignment propagates to all children in flow.
//
// TODO(#2018, pcwalton): Do this in the cascade instead.
flow::mut_base(kid).flags.propagate_text_alignment_from_parent(flags.clone());
// Handle `text-indent` on behalf of any inline children that we have. This is
// necessary because any percentages are relative to the containing block, which only
// we know.
if kid.is_inline_flow() {
kid.as_mut_inline().first_line_indentation =
specified(self.fragment.style().get_inheritedtext().text_indent,
containing_block_size);
}
}
}
/// Determines the type of formatting context this is. See the definition of
/// `FormattingContextType`.
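    ///
    /// For example (a summary of the match below, not an exhaustive list): floats map to
    /// `FormattingContextType::Other`, a block with non-`visible` overflow or multicol
    /// style maps to `FormattingContextType::Block`, and an ordinary in-flow block maps
    /// to `FormattingContextType::None`.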
fn formatting_context_type(&self) -> FormattingContextType {
let style = self.fragment.style();
if style.get_box().float != float::T::none {
return FormattingContextType::Other
}
match style.get_box().display {
display::T::table_cell |
display::T::table_caption |
display::T::table_row_group |
display::T::table |
display::T::inline_block => {
FormattingContextType::Other
}
_ if style.get_box().overflow_x != overflow_x::T::visible ||
style.get_box().overflow_y != overflow_y::T(overflow_x::T::visible) ||
style.is_multicol() => {
FormattingContextType::Block
}
_ => FormattingContextType::None,
}
}
/// Per CSS 2.1 § 9.5, block formatting contexts' inline widths and positions are affected by
/// the presence of floats. This is the part of the assign-block-sizes traversal that computes
/// the final inline position and width for such flows.
///
/// Note that this is part of the assign-block-sizes traversal, not the assign-inline-sizes
/// traversal as one might expect. That is because, in general, float placement cannot occur
/// until heights are assigned. To work around this unfortunate circular dependency, by the
/// time we get here we have already estimated the width of the block formatting context based
/// on the floats we could see at the time of inline-size assignment. The job of this function,
/// therefore, is not only to assign the final size but also to perform the layout again for
/// this block formatting context if our speculation was wrong.
///
/// FIXME(pcwalton): This code is not incremental-reflow-safe (i.e. not idempotent).
fn assign_inline_position_for_formatting_context(&mut self) {
debug_assert!(self.formatting_context_type() != FormattingContextType::None);
if !self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) {
return
}
let info = PlacementInfo {
size: self.fragment.border_box.size.convert(self.fragment.style.writing_mode,
self.base.floats.writing_mode),
ceiling: self.base.position.start.b,
max_inline_size: MAX_AU,
kind: FloatKind::Left,
};
// Offset our position by whatever displacement is needed to not impact the floats.
let rect = self.base.floats.place_between_floats(&info);
self.base.position.start.i = self.base.position.start.i + rect.start.i;
// TODO(pcwalton): If the inline-size of this flow is different from the size we estimated
// earlier, lay it out again.
self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
}
fn is_inline_block(&self) -> bool {
self.fragment.style().get_box().display == display::T::inline_block
}
/// Computes the content portion (only) of the intrinsic inline sizes of this flow. This is
/// used for calculating shrink-to-fit width. Assumes that intrinsic sizes have already been
/// computed for this flow.
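    ///
    /// For example (numbers invented for illustration): with a minimum intrinsic
    /// inline-size of 100px and a surrounding intrinsic inline-size of 20px, the content
    /// portion of the minimum inline-size is 80px.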
fn content_intrinsic_inline_sizes(&self) -> IntrinsicISizes {
let surrounding_inline_size = self.fragment.surrounding_intrinsic_inline_size();
IntrinsicISizes {
minimum_inline_size: self.base.intrinsic_inline_sizes.minimum_inline_size -
surrounding_inline_size,
preferred_inline_size: self.base.intrinsic_inline_sizes.preferred_inline_size -
surrounding_inline_size,
}
}
/// Computes intrinsic widths for a block.
pub fn bubble_inline_sizes_for_block(&mut self, consult_children: bool) {
let _scope = layout_debug_scope!("block::bubble_inline_sizes {:x}", self.base.debug_id());
let mut flags = self.base.flags;
flags.remove(HAS_LEFT_FLOATED_DESCENDANTS);
flags.remove(HAS_RIGHT_FLOATED_DESCENDANTS);
// Find the maximum inline-size from children.
let mut computation = self.fragment.compute_intrinsic_inline_sizes();
let (mut left_float_width, mut right_float_width) = (Au(0), Au(0));
let (mut left_float_width_accumulator, mut right_float_width_accumulator) = (Au(0), Au(0));
for kid in self.base.child_iter() {
let is_absolutely_positioned =
flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED);
let child_base = flow::mut_base(kid);
let float_kind = child_base.flags.float_kind();
if !is_absolutely_positioned && consult_children {
computation.content_intrinsic_sizes.minimum_inline_size =
max(computation.content_intrinsic_sizes.minimum_inline_size,
child_base.intrinsic_inline_sizes.minimum_inline_size);
if child_base.flags.contains(CLEARS_LEFT) {
left_float_width = max(left_float_width, left_float_width_accumulator);
left_float_width_accumulator = Au(0)
}
if child_base.flags.contains(CLEARS_RIGHT) {
right_float_width = max(right_float_width, right_float_width_accumulator);
right_float_width_accumulator = Au(0)
}
match float_kind {
float::T::none => {
computation.content_intrinsic_sizes.preferred_inline_size =
max(computation.content_intrinsic_sizes.preferred_inline_size,
child_base.intrinsic_inline_sizes.preferred_inline_size);
}
float::T::left => {
left_float_width_accumulator = left_float_width_accumulator +
child_base.intrinsic_inline_sizes.preferred_inline_size;
}
float::T::right => {
right_float_width_accumulator = right_float_width_accumulator +
child_base.intrinsic_inline_sizes.preferred_inline_size;
}
}
}
flags.union_floated_descendants_flags(child_base.flags);
}
// FIXME(pcwalton): This should consider all float descendants, not just children.
// FIXME(pcwalton): This is not well-spec'd; INTRINSIC specifies to do this, but CSS-SIZING
// says not to. In practice, Gecko and WebKit both do this.
left_float_width = max(left_float_width, left_float_width_accumulator);
right_float_width = max(right_float_width, right_float_width_accumulator);
computation.content_intrinsic_sizes.preferred_inline_size =
max(computation.content_intrinsic_sizes.preferred_inline_size,
left_float_width + right_float_width);
self.base.intrinsic_inline_sizes = computation.finish();
match self.fragment.style().get_box().float {
float::T::none => {}
float::T::left => flags.insert(HAS_LEFT_FLOATED_DESCENDANTS),
float::T::right => flags.insert(HAS_RIGHT_FLOATED_DESCENDANTS),
}
self.base.flags = flags
}
fn determine_if_layer_needed(&mut self) {
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
            // Fixed-position flows get layers.
if self.is_fixed() {
self.base.flags.insert(NEEDS_LAYER);
return
}
}
// This flow needs a layer if it has a 3d transform, or provides perspective
// to child layers. See http://dev.w3.org/csswg/css-transforms/#3d-rendering-contexts.
let has_3d_transform = self.transform_requires_layer();
let has_perspective = self.fragment.style().get_effects().perspective !=
LengthOrNone::None;
if has_3d_transform || has_perspective {
self.base.flags.insert(NEEDS_LAYER);
return
}
match (self.fragment.style().get_box().overflow_x,
self.fragment.style().get_box().overflow_y.0) {
(overflow_x::T::auto, _) | (overflow_x::T::scroll, _) |
(_, overflow_x::T::auto) | (_, overflow_x::T::scroll) => {
self.base.flags.insert(NEEDS_LAYER);
}
_ => {}
}
}
}
impl Flow for BlockFlow {
fn class(&self) -> FlowClass {
FlowClass::Block
}
fn as_mut_block(&mut self) -> &mut BlockFlow {
self
}
fn as_block(&self) -> &BlockFlow {
self
}
/// Pass 1 of reflow: computes minimum and preferred inline-sizes.
///
/// Recursively (bottom-up) determine the flow's minimum and preferred inline-sizes. When
/// called on this flow, all child flows have had their minimum and preferred inline-sizes set.
/// This function must decide minimum/preferred inline-sizes based on its children's
/// inline-sizes and the dimensions of any fragments it is responsible for flowing.
fn bubble_inline_sizes(&mut self) {
        // If this block has a fixed width, just use that for the minimum and preferred
        // inline-size, rather than bubbling up the children's inline-sizes.
let consult_children = match self.fragment.style().get_box().width {
LengthOrPercentageOrAuto::Length(_) => false,
_ => true,
};
self.bubble_inline_sizes_for_block(consult_children)
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
///
/// Dual fragments consume some inline-size first, and the remainder is assigned to all child
/// (block) contexts.
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("block::assign_inline_sizes {:x}", self.base.debug_id());
if !self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) {
return
}
debug!("assign_inline_sizes({}): assigning inline_size for flow",
if self.base.flags.is_float() {
"float"
} else {
"block"
});
self.base.floats = Floats::new(self.base.writing_mode);
if self.is_root() {
debug!("Setting root position");
self.base.position.start = LogicalPoint::zero(self.base.writing_mode);
self.base.block_container_inline_size = LogicalSize::from_physical(
self.base.writing_mode, layout_context.shared.screen_size).inline;
self.base.block_container_writing_mode = self.base.writing_mode;
// The root element is never impacted by floats.
self.base.flags.remove(IMPACTED_BY_LEFT_FLOATS);
self.base.flags.remove(IMPACTED_BY_RIGHT_FLOATS);
}
// Our inline-size was set to the inline-size of the containing block by the flow's parent.
// Now compute the real value.
self.propagate_and_compute_used_inline_size(layout_context);
// Formatting contexts are never impacted by floats.
match self.formatting_context_type() {
FormattingContextType::None => {}
FormattingContextType::Block => {
self.base.flags.remove(IMPACTED_BY_LEFT_FLOATS);
self.base.flags.remove(IMPACTED_BY_RIGHT_FLOATS);
// We can't actually compute the inline-size of this block now, because floats
// might affect it. Speculate that its inline-size is equal to the inline-size
// computed above minus the inline-size of the previous left and/or right floats.
//
// (If `max-width` is set, then don't perform this speculation. We guess that the
// page set `max-width` in order to avoid hitting floats. The search box on Google
// SERPs falls into this category.)
if self.fragment.style.max_inline_size() == LengthOrPercentageOrNone::None {
self.fragment.border_box.size.inline =
self.fragment.border_box.size.inline -
self.inline_size_of_preceding_left_floats -
self.inline_size_of_preceding_right_floats;
}
}
FormattingContextType::Other => {
self.base.flags.remove(IMPACTED_BY_LEFT_FLOATS);
self.base.flags.remove(IMPACTED_BY_RIGHT_FLOATS);
}
}
// Move in from the inline-start border edge.
let inline_start_content_edge = self.fragment.border_box.start.i +
self.fragment.border_padding.inline_start;
let padding_and_borders = self.fragment.border_padding.inline_start_end();
// Distance from the inline-end margin edge to the inline-end content edge.
let inline_end_content_edge =
self.fragment.margin.inline_end +
self.fragment.border_padding.inline_end;
let content_inline_size = self.fragment.border_box.size.inline - padding_and_borders;
self.propagate_assigned_inline_size_to_children(layout_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|_, _, _, _, _, _| {});
}
fn place_float_if_applicable<'a>(&mut self, _: &'a LayoutContext<'a>) {
if self.base.flags.is_float() {
self.place_float();
}
}
fn assign_block_size_for_inorder_child_if_necessary<'a>(&mut self,
layout_context: &'a LayoutContext<'a>,
parent_thread_id: u8)
-> bool {
if self.base.flags.is_float() {
return false
}
let is_formatting_context = self.formatting_context_type() != FormattingContextType::None;
if !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && is_formatting_context {
self.assign_inline_position_for_formatting_context();
}
if self.base.flags.impacted_by_floats() {
self.base.thread_id = parent_thread_id;
if self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) {
self.assign_block_size(layout_context);
(self as &mut Flow).early_store_overflow(layout_context);
// Don't remove the restyle damage; `assign_block_size` decides whether that is
// appropriate (which in the case of e.g. absolutely-positioned flows, it is not).
}
return true
}
if is_formatting_context {
// If this is a formatting context and was *not* impacted by floats, then we must
// translate the floats past us.
let writing_mode = self.base.floats.writing_mode;
let delta = self.base.position.size.block;
self.base.floats.translate(LogicalSize::new(writing_mode, Au(0), -delta));
return true
}
false
}
fn assign_block_size<'a>(&mut self, ctx: &'a LayoutContext<'a>) {
if self.is_replaced_content() {
let _scope = layout_debug_scope!("assign_replaced_block_size_if_necessary {:x}",
self.base.debug_id());
// Assign block-size for fragment if it is an image fragment.
let containing_block_block_size =
self.base.block_container_explicit_block_size;
self.fragment.assign_replaced_block_size_if_necessary(containing_block_block_size);
if !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
self.base.position.size.block = self.fragment.border_box.size.block;
}
} else if self.is_root() || self.base.flags.is_float() || self.is_inline_block() {
            // Per CSS 2.1 § 8.3.1, margins of the root element, floats, and
            // inline-blocks never collapse.
            debug!("assign_block_size: assigning block_size for root/float/inline-block flow {:?}",
flow::base(self).debug_id());
self.assign_block_size_block_base(ctx, MarginsMayCollapseFlag::MarginsMayNotCollapse);
} else {
debug!("assign_block_size: assigning block_size for block {:?}",
flow::base(self).debug_id());
self.assign_block_size_block_base(ctx, MarginsMayCollapseFlag::MarginsMayCollapse);
}
}
fn compute_absolute_position(&mut self, layout_context: &LayoutContext) {
if (self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.base.late_absolute_position_info.layers_needed_for_positioned_flows) ||
self.base.flags.contains(NEEDS_LAYER) {
self.fragment.flags.insert(HAS_LAYER)
}
// FIXME (mbrubeck): Get the real container size, taking the container writing mode into
// account. Must handle vertical writing modes.
let container_size = Size2D::new(self.base.block_container_inline_size, Au(0));
if self.is_root() {
self.base.clip = ClippingRegion::max();
self.base.stacking_relative_position_of_display_port = MAX_RECT;
}
let transform_style = self.fragment.style().get_used_transform_style();
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
// `overflow: auto` and `overflow: scroll` force creation of layers, since we can only
// scroll layers.
match (self.fragment.style().get_box().overflow_x,
self.fragment.style().get_box().overflow_y.0) {
(overflow_x::T::auto, _) | (overflow_x::T::scroll, _) |
(_, overflow_x::T::auto) | (_, overflow_x::T::scroll) => {
self.base.clip = ClippingRegion::max();
self.base.stacking_relative_position_of_display_port = MAX_RECT;
}
_ => {}
}
let position_start = self.base.position.start.to_physical(self.base.writing_mode,
container_size);
// Compute our position relative to the nearest ancestor stacking context. This will be
// passed down later as part of containing block details for absolute descendants.
let absolute_stacking_relative_position = if self.is_fixed() {
// The viewport is initially at (0, 0).
position_start
} else {
// Absolute position of the containing block + position of absolute
// flow w.r.t. the containing block.
self.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block + position_start
};
if !self.base.writing_mode.is_vertical() {
if !self.base.flags.contains(INLINE_POSITION_IS_STATIC) {
self.base.stacking_relative_position.x = absolute_stacking_relative_position.x
}
if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) {
self.base.stacking_relative_position.y = absolute_stacking_relative_position.y
}
} else {
if !self.base.flags.contains(INLINE_POSITION_IS_STATIC) {
self.base.stacking_relative_position.y = absolute_stacking_relative_position.y
}
if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) {
self.base.stacking_relative_position.x = absolute_stacking_relative_position.x
}
}
}
// For relatively-positioned descendants, the containing block formed by a block is just
// the content box. The containing block for absolutely-positioned descendants, on the
// other hand, is only established if we are positioned.
let relative_offset =
self.fragment.relative_position(&self.base
.early_absolute_position_info
.relative_containing_block_size);
if self.contains_positioned_fragments() {
let border_box_origin = (self.fragment.border_box -
self.fragment.style.logical_border_width()).start;
self.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block =
self.base.stacking_relative_position +
(border_box_origin + relative_offset).to_physical(self.base.writing_mode,
container_size)
}
// Compute absolute position info for children.
let stacking_relative_position_of_absolute_containing_block_for_children =
if self.fragment.establishes_stacking_context() {
let logical_border_width = self.fragment.style().logical_border_width();
let position = LogicalPoint::new(self.base.writing_mode,
logical_border_width.inline_start,
logical_border_width.block_start);
let position = position.to_physical(self.base.writing_mode, container_size);
if self.contains_positioned_fragments() {
position
} else {
// We establish a stacking context but are not positioned. (This will happen
// if, for example, the element has `position: static` but has `opacity` or
// `transform` set.) In this case, absolutely-positioned children will not be
// positioned relative to us but will instead be positioned relative to our
// containing block.
position - self.base.stacking_relative_position
}
} else {
self.base
.late_absolute_position_info
.stacking_relative_position_of_absolute_containing_block
};
let late_absolute_position_info_for_children = LateAbsolutePositionInfo {
stacking_relative_position_of_absolute_containing_block:
stacking_relative_position_of_absolute_containing_block_for_children,
layers_needed_for_positioned_flows: self.base
.flags
.contains(LAYERS_NEEDED_FOR_DESCENDANTS),
};
let container_size_for_children =
self.base.position.size.to_physical(self.base.writing_mode);
// Compute the origin and clipping rectangle for children.
let origin_for_children;
let clip_in_child_coordinate_system;
let is_stacking_context = self.fragment.establishes_stacking_context();
if is_stacking_context {
// We establish a stacking context, so the position of our children is vertically
// correct, but has to be adjusted to accommodate horizontal margins. (Note the
// calculation involving `position` below and recall that inline-direction flow
// positions are relative to the edges of the margin box.)
//
// FIXME(pcwalton): Is this vertical-writing-direction-safe?
let margin = self.fragment.margin.to_physical(self.base.writing_mode);
origin_for_children = Point2D::new(-margin.left, Au(0));
clip_in_child_coordinate_system =
self.base.clip.translate(&-self.base.stacking_relative_position);
} else {
let relative_offset = relative_offset.to_physical(self.base.writing_mode);
origin_for_children = self.base.stacking_relative_position + relative_offset;
clip_in_child_coordinate_system = self.base.clip.clone();
}
let stacking_relative_position_of_display_port_for_children =
if is_stacking_context || self.is_root() {
let visible_rect =
match layout_context.shared.visible_rects.get(&self.layer_id()) {
Some(visible_rect) => *visible_rect,
None => Rect::new(Point2D::zero(), layout_context.shared.screen_size),
};
let screen_size = layout_context.shared.screen_size;
visible_rect.inflate(screen_size.width * DISPLAY_PORT_SIZE_FACTOR,
screen_size.height * DISPLAY_PORT_SIZE_FACTOR)
} else if is_stacking_context {
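            // FIXME: This arm is unreachable as written: `is_stacking_context` is
            // already matched by the first condition above, so that arm always wins.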
self.base
.stacking_relative_position_of_display_port
.translate(&-self.base.stacking_relative_position)
} else {
self.base.stacking_relative_position_of_display_port
};
let stacking_relative_border_box =
self.fragment
.stacking_relative_border_box(&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Own);
let clip = self.fragment.clipping_region_for_children(
&clip_in_child_coordinate_system,
&stacking_relative_border_box,
self.base.flags.contains(IS_ABSOLUTELY_POSITIONED));
// Process children.
for kid in self.base.child_iter() {
// If this layer preserves the 3d context of children,
// then children will need a render layer.
            // TODO(gw): This isn't always correct. In some cases
            // this may create more layers than needed. I think
            // there are also some edge cases where children don't
            // get a layer when they should.
if transform_style == transform_style::T::preserve_3d {
flow::mut_base(kid).flags.insert(NEEDS_LAYER);
}
if flow::base(kid).flags.contains(INLINE_POSITION_IS_STATIC) ||
flow::base(kid).flags.contains(BLOCK_POSITION_IS_STATIC) {
let kid_base = flow::mut_base(kid);
let physical_position = kid_base.position.to_physical(kid_base.writing_mode,
container_size_for_children);
// Set the inline and block positions as necessary.
if !kid_base.writing_mode.is_vertical() {
if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) {
kid_base.stacking_relative_position.x = origin_for_children.x +
physical_position.origin.x
}
if kid_base.flags.contains(BLOCK_POSITION_IS_STATIC) {
kid_base.stacking_relative_position.y = origin_for_children.y +
physical_position.origin.y
}
} else {
if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) {
kid_base.stacking_relative_position.y = origin_for_children.y +
physical_position.origin.y
}
if kid_base.flags.contains(BLOCK_POSITION_IS_STATIC) {
kid_base.stacking_relative_position.x = origin_for_children.x +
physical_position.origin.x
}
}
}
flow::mut_base(kid).late_absolute_position_info =
late_absolute_position_info_for_children;
flow::mut_base(kid).clip = clip.clone();
flow::mut_base(kid).stacking_relative_position_of_display_port =
stacking_relative_position_of_display_port_for_children;
}
}
fn mark_as_root(&mut self) {
self.flags.insert(IS_ROOT)
}
fn is_root(&self) -> bool {
self.flags.contains(IS_ROOT)
}
/// The 'position' property of this flow.
fn positioning(&self) -> position::T {
self.fragment.style.get_box().position
}
/// Return the dimensions of the containing block generated by this flow for absolutely-
/// positioned descendants. For block flows, this is the padding box.
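    ///
    /// For example (numbers invented for illustration): a border box 100px wide in the
    /// inline direction with 5px borders on each inline side yields a 90px-wide
    /// containing block for absolutely-positioned descendants.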
fn generated_containing_block_size(&self, _: OpaqueFlow) -> LogicalSize<Au> {
(self.fragment.border_box - self.fragment.style().logical_border_width()).size
}
fn layer_id(&self) -> LayerId {
let layer_type = match self.fragment.pseudo {
PseudoElementType::Normal => LayerType::FragmentBody,
PseudoElementType::Before(_) => LayerType::BeforePseudoContent,
PseudoElementType::After(_) => LayerType::AfterPseudoContent
};
LayerId::new_of_type(layer_type, self.fragment.node.id() as usize)
}
fn layer_id_for_overflow_scroll(&self) -> LayerId {
LayerId::new_of_type(LayerType::OverflowScroll, self.fragment.node.id() as usize)
}
fn is_absolute_containing_block(&self) -> bool {
self.contains_positioned_fragments()
}
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.fragment.style().logical_position().inline_start ==
LengthOrPercentageOrAuto::Auto &&
self.fragment.style().logical_position().inline_end ==
LengthOrPercentageOrAuto::Auto {
self.base.position.start.i = inline_position
}
}
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) &&
self.fragment.style().logical_position().block_start ==
LengthOrPercentageOrAuto::Auto &&
self.fragment.style().logical_position().block_end ==
LengthOrPercentageOrAuto::Auto {
self.base.position.start.b = block_position
}
}
fn build_display_list(&mut self, layout_context: &LayoutContext) {
self.build_display_list_for_block(box DisplayList::new(),
layout_context,
BorderPaintingMode::Separate);
if opts::get().validate_display_list_geometry {
self.base.validate_display_list_geometry();
}
}
fn repair_style(&mut self, new_style: &Arc<ComputedValues>) {
self.fragment.repair_style(new_style)
}
fn compute_overflow(&self) -> Rect<Au> {
self.fragment.compute_overflow(&self.base
.early_absolute_position_info
.relative_containing_block_size)
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>) {
if !iterator.should_process(&self.fragment) {
return
}
iterator.process(&self.fragment,
level,
&self.fragment
.stacking_relative_border_box(&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Own)
.translate(stacking_context_position));
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
(*mutator)(&mut self.fragment)
}
}
impl fmt::Debug for BlockFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"{:?} - {:x}: frag={:?} ({:?})",
self.class(),
self.base.debug_id(),
self.fragment,
self.base)
}
}
/// The inputs for the inline-sizes-and-margins constraint equation.
#[derive(Debug, Copy, Clone)]
pub struct ISizeConstraintInput {
pub computed_inline_size: MaybeAuto,
pub inline_start_margin: MaybeAuto,
pub inline_end_margin: MaybeAuto,
pub inline_start: MaybeAuto,
pub inline_end: MaybeAuto,
pub text_align: text_align::T,
pub available_inline_size: Au,
}
impl ISizeConstraintInput {
pub fn new(computed_inline_size: MaybeAuto,
inline_start_margin: MaybeAuto,
inline_end_margin: MaybeAuto,
inline_start: MaybeAuto,
inline_end: MaybeAuto,
text_align: text_align::T,
available_inline_size: Au)
-> ISizeConstraintInput {
ISizeConstraintInput {
computed_inline_size: computed_inline_size,
inline_start_margin: inline_start_margin,
inline_end_margin: inline_end_margin,
inline_start: inline_start,
inline_end: inline_end,
text_align: text_align,
available_inline_size: available_inline_size,
}
}
}
/// The solutions for the inline-size-and-margins constraint equation.
#[derive(Copy, Clone, Debug)]
pub struct ISizeConstraintSolution {
pub inline_start: Au,
pub inline_size: Au,
pub margin_inline_start: Au,
pub margin_inline_end: Au
}
impl ISizeConstraintSolution {
pub fn new(inline_size: Au, margin_inline_start: Au, margin_inline_end: Au)
-> ISizeConstraintSolution {
ISizeConstraintSolution {
inline_start: Au(0),
inline_size: inline_size,
margin_inline_start: margin_inline_start,
margin_inline_end: margin_inline_end,
}
}
fn for_absolute_flow(inline_start: Au,
inline_size: Au,
margin_inline_start: Au,
margin_inline_end: Au)
-> ISizeConstraintSolution {
ISizeConstraintSolution {
inline_start: inline_start,
inline_size: inline_size,
margin_inline_start: margin_inline_start,
margin_inline_end: margin_inline_end,
}
}
}
/// A trait that encapsulates the inline-size and margin calculation.
///
/// CSS 2.1, Section 10.3.
pub trait ISizeAndMarginsComputer {
/// Instructs the fragment to compute its border and padding.
fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) {
block.fragment.compute_border_and_padding(containing_block_inline_size,
border_collapse::T::separate);
}
/// Compute the inputs for the ISize constraint equation.
///
/// This is called only once to compute the initial inputs. For calculations involving
/// minimum and maximum inline-size, we don't need to recompute these.
fn compute_inline_size_constraint_inputs(&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
layout_context: &LayoutContext)
-> ISizeConstraintInput {
let containing_block_inline_size =
self.containing_block_inline_size(block, parent_flow_inline_size, layout_context);
block.fragment.compute_block_direction_margins(containing_block_inline_size);
block.fragment.compute_inline_direction_margins(containing_block_inline_size);
self.compute_border_and_padding(block, containing_block_inline_size);
let mut computed_inline_size = self.initial_computed_inline_size(block,
parent_flow_inline_size,
layout_context);
let style = block.fragment.style();
match (computed_inline_size, style.get_box().box_sizing) {
(MaybeAuto::Specified(size), box_sizing::T::border_box) => {
computed_inline_size =
MaybeAuto::Specified(size - block.fragment.border_padding.inline_start_end())
}
(MaybeAuto::Auto, box_sizing::T::border_box) |
(_, box_sizing::T::content_box) => {}
}
// The text alignment of a block flow is the text alignment of its box's style.
block.base.flags.set_text_align(style.get_inheritedtext().text_align);
let margin = style.logical_margin();
let position = style.logical_position();
let available_inline_size = containing_block_inline_size -
block.fragment.border_padding.inline_start_end();
ISizeConstraintInput::new(computed_inline_size,
MaybeAuto::from_style(margin.inline_start,
containing_block_inline_size),
MaybeAuto::from_style(margin.inline_end,
containing_block_inline_size),
MaybeAuto::from_style(position.inline_start,
containing_block_inline_size),
MaybeAuto::from_style(position.inline_end,
containing_block_inline_size),
style.get_inheritedtext().text_align,
available_inline_size)
}
/// Set the used values for inline-size and margins from the relevant constraint equation.
/// This is called only once.
///
/// Set:
/// * Used values for content inline-size, inline-start margin, and inline-end margin for this
/// flow's box;
/// * Inline-start coordinate of this flow's box;
/// * Inline-start coordinate of the flow with respect to its containing block (if this is an
/// absolute flow).
fn set_inline_size_constraint_solutions(&self,
block: &mut BlockFlow,
solution: ISizeConstraintSolution) {
let inline_size;
let extra_inline_size_from_margin;
{
let block_mode = block.base.writing_mode;
// FIXME (mbrubeck): Get correct containing block for positioned blocks?
let container_mode = block.base.block_container_writing_mode;
let container_size = block.base.block_container_inline_size;
let fragment = block.fragment();
fragment.margin.inline_start = solution.margin_inline_start;
fragment.margin.inline_end = solution.margin_inline_end;
// The associated fragment has the border box of this flow.
inline_size = solution.inline_size + fragment.border_padding.inline_start_end();
fragment.border_box.size.inline = inline_size;
// Start border edge.
// FIXME (mbrubeck): Handle vertical writing modes.
fragment.border_box.start.i =
if container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr() {
fragment.margin.inline_start
} else {
// The parent's "start" direction is the child's "end" direction.
container_size - inline_size - fragment.margin.inline_end
};
            // To calculate the total size of this block, we also need to account for any
            // additional size contribution from positive margins. A negative margin does
            // not make the block any larger.
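            // For example (numbers invented for illustration): margins of 10px and -5px
            // contribute only the positive 10px to the total.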
extra_inline_size_from_margin = max(Au(0), fragment.margin.inline_start) +
max(Au(0), fragment.margin.inline_end);
}
        // We also resize the block itself, to ensure that overflow is not calculated
        // as the inline-size of our parent. We might be smaller than the parent, or
        // larger if we overflow it.
flow::mut_base(block).position.size.inline = inline_size + extra_inline_size_from_margin;
}
/// Set the inline coordinate of the given flow if it is absolutely positioned.
fn set_inline_position_of_flow_if_necessary(&self,
_: &mut BlockFlow,
_: ISizeConstraintSolution) {}
/// Solve the inline-size and margins constraints for this block flow.
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution;
fn initial_computed_inline_size(&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
layout_context: &LayoutContext)
-> MaybeAuto {
MaybeAuto::from_style(block.fragment().style().content_inline_size(),
self.containing_block_inline_size(block,
parent_flow_inline_size,
layout_context))
}
fn containing_block_inline_size(&self,
_: &mut BlockFlow,
parent_flow_inline_size: Au,
_: &LayoutContext)
-> Au {
parent_flow_inline_size
}
/// Compute the used value of inline-size, taking care of min-inline-size and max-inline-size.
///
/// CSS Section 10.4: Minimum and Maximum inline-sizes
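    ///
    /// For example (numbers invented for illustration): a tentative inline-size of 500px
    /// with `max-inline-size: 300px` and `min-inline-size: 350px` is first clamped to
    /// 300px and then raised to 350px, since the `min` pass below runs last.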
fn compute_used_inline_size(&self,
block: &mut BlockFlow,
layout_context: &LayoutContext,
parent_flow_inline_size: Au) {
let mut input = self.compute_inline_size_constraint_inputs(block,
parent_flow_inline_size,
layout_context);
let containing_block_inline_size =
self.containing_block_inline_size(block, parent_flow_inline_size, layout_context);
let mut solution = self.solve_inline_size_constraints(block, &input);
// If the tentative used inline-size is greater than 'max-inline-size', inline-size should
// be recalculated, but this time using the computed value of 'max-inline-size' as the
// computed value for 'inline-size'.
match specified_or_none(block.fragment().style().max_inline_size(),
containing_block_inline_size) {
Some(max_inline_size) if max_inline_size < solution.inline_size => {
input.computed_inline_size = MaybeAuto::Specified(max_inline_size);
solution = self.solve_inline_size_constraints(block, &input);
}
_ => {}
}
// If the resulting inline-size is smaller than 'min-inline-size', inline-size should be
// recalculated, but this time using the value of 'min-inline-size' as the computed value
// for 'inline-size'.
let computed_min_inline_size = specified(block.fragment().style().min_inline_size(),
containing_block_inline_size);
if computed_min_inline_size > solution.inline_size {
input.computed_inline_size = MaybeAuto::Specified(computed_min_inline_size);
solution = self.solve_inline_size_constraints(block, &input);
}
self.set_inline_size_constraint_solutions(block, solution);
self.set_inline_position_of_flow_if_necessary(block, solution);
}
/// Computes inline-start and inline-end margins and inline-size.
///
/// This is used by both replaced and non-replaced Blocks.
///
/// CSS 2.1 Section 10.3.3.
/// Constraint Equation: margin-inline-start + margin-inline-end + inline-size =
/// available_inline-size
/// where available_inline-size = CB inline-size - (horizontal border + padding)
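    ///
    /// A hypothetical worked example (numbers invented for illustration): with an
    /// available inline-size of 100px, a specified inline-size of 60px, and both margins
    /// `auto`, the two margins split the remaining 40px evenly, giving 20px each. If only
    /// one margin is `auto`, it absorbs the whole remainder.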
fn solve_block_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let (computed_inline_size, inline_start_margin, inline_end_margin, available_inline_size) =
(input.computed_inline_size,
input.inline_start_margin,
input.inline_end_margin,
input.available_inline_size);
// Check for direction of parent flow (NOT Containing Block)
let block_mode = block.base.writing_mode;
let container_mode = block.base.block_container_writing_mode;
// FIXME (mbrubeck): Handle vertical writing modes.
let parent_has_same_direction = container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr();
// If inline-size is not 'auto', and inline-size + margins > available_inline-size, all
// 'auto' margins are treated as 0.
let (inline_start_margin, inline_end_margin) = match computed_inline_size {
MaybeAuto::Auto => (inline_start_margin, inline_end_margin),
MaybeAuto::Specified(inline_size) => {
let inline_start = inline_start_margin.specified_or_zero();
let inline_end = inline_end_margin.specified_or_zero();
if (inline_start + inline_end + inline_size) > available_inline_size {
(MaybeAuto::Specified(inline_start), MaybeAuto::Specified(inline_end))
} else {
(inline_start_margin, inline_end_margin)
}
}
};
        // Invariant: margin-inline-start + inline-size + margin-inline-end ==
        //            available_inline_size
let (inline_start_margin, inline_size, inline_end_margin) =
match (inline_start_margin, computed_inline_size, inline_end_margin) {
// If all have a computed value other than 'auto', the system is over-constrained.
(MaybeAuto::Specified(margin_start),
MaybeAuto::Specified(inline_size),
MaybeAuto::Specified(margin_end)) => {
match (input.text_align, parent_has_same_direction) {
(text_align::T::servo_center, _) => {
// This is used for `<center>` and friends per HTML5 § 14.3.3. Make the
// inline-start and inline-end margins equal per HTML5 § 14.2.
let margin = (available_inline_size - inline_size).scale_by(0.5);
(margin, inline_size, margin)
}
(_, true) => {
// Ignore the end margin.
(margin_start, inline_size, available_inline_size -
(margin_start + inline_size))
}
(_, false) => {
// Ignore the start margin.
(available_inline_size - (margin_end + inline_size),
inline_size,
margin_end)
}
}
}
// If exactly one value is 'auto', solve for it
(MaybeAuto::Auto,
MaybeAuto::Specified(inline_size),
MaybeAuto::Specified(margin_end)) =>
(available_inline_size - (inline_size + margin_end), inline_size, margin_end),
(MaybeAuto::Specified(margin_start),
MaybeAuto::Auto,
MaybeAuto::Specified(margin_end)) => {
(margin_start,
available_inline_size - (margin_start + margin_end),
margin_end)
}
(MaybeAuto::Specified(margin_start),
MaybeAuto::Specified(inline_size),
MaybeAuto::Auto) => {
(margin_start,
inline_size,
available_inline_size - (margin_start + inline_size))
}
// If inline-size is set to 'auto', any other 'auto' value becomes '0',
// and inline-size is solved for
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => {
(Au(0), available_inline_size - margin_end, margin_end)
}
(MaybeAuto::Specified(margin_start), MaybeAuto::Auto, MaybeAuto::Auto) => {
(margin_start, available_inline_size - margin_start, Au(0))
}
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
(Au(0), available_inline_size, Au(0))
}
// If inline-start and inline-end margins are auto, they become equal
(MaybeAuto::Auto, MaybeAuto::Specified(inline_size), MaybeAuto::Auto) => {
let margin = (available_inline_size - inline_size).scale_by(0.5);
(margin, inline_size, margin)
}
};
ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin)
}
}
/// The different types of blocks.
///
/// They mainly differ in the way inline-size, block-size, and margins are calculated
/// for them.
pub struct AbsoluteNonReplaced;
pub struct AbsoluteReplaced;
pub struct BlockNonReplaced;
pub struct BlockReplaced;
pub struct FloatNonReplaced;
pub struct FloatReplaced;
pub struct InlineBlockNonReplaced;
pub struct InlineBlockReplaced;
impl ISizeAndMarginsComputer for AbsoluteNonReplaced {
/// Solve the horizontal constraint equation for absolute non-replaced elements.
///
/// CSS Section 10.3.7
/// Constraint equation:
/// inline-start + inline-end + inline-size + margin-inline-start + margin-inline-end
/// = absolute containing block inline-size - (horizontal padding and border)
/// [aka available inline-size]
///
/// Return the solution for the equation.
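    ///
    /// A hypothetical worked example (numbers invented for illustration): with an
    /// available inline-size of 100px, `inline-start: 10px`, `inline-end: 10px`,
    /// `inline-size: 60px`, and both margins `auto`, the leftover 20px is split evenly,
    /// so each margin becomes 10px.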
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let &ISizeConstraintInput {
computed_inline_size,
inline_start_margin,
inline_end_margin,
inline_start,
inline_end,
available_inline_size,
..
} = input;
// Check for direction of parent flow (NOT Containing Block)
let block_mode = block.base.writing_mode;
let container_mode = block.base.block_container_writing_mode;
// FIXME (mbrubeck): Handle vertical writing modes.
let parent_has_same_direction = container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr();
let (inline_start, inline_size, margin_inline_start, margin_inline_end) =
match (inline_start, inline_end, computed_inline_size) {
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
                    // Now this is the same situation as `inline-start` Specified, with
                    // `inline-end` and `inline-size` both Auto.
                    // Set inline-end to zero to calculate inline-size.
let inline_size =
block.get_shrink_to_fit_inline_size(available_inline_size -
(margin_start + margin_end));
(Au(0), inline_size, margin_start, margin_end)
}
(MaybeAuto::Specified(inline_start),
MaybeAuto::Specified(inline_end),
MaybeAuto::Specified(inline_size)) => {
match (inline_start_margin, inline_end_margin) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let total_margin_val =
available_inline_size - inline_start - inline_end - inline_size;
if total_margin_val < Au(0) {
if parent_has_same_direction {
// margin-inline-start becomes 0
(inline_start, inline_size, Au(0), total_margin_val)
} else {
// margin-inline-end becomes 0, because it's toward the parent's
// inline-start edge.
(inline_start, inline_size, total_margin_val, Au(0))
}
} else {
// Equal margins
(inline_start,
inline_size,
total_margin_val.scale_by(0.5),
total_margin_val.scale_by(0.5))
}
}
(MaybeAuto::Specified(margin_start), MaybeAuto::Auto) => {
let sum = inline_start + inline_end + inline_size + margin_start;
(inline_start, inline_size, margin_start, available_inline_size - sum)
}
(MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => {
let sum = inline_start + inline_end + inline_size + margin_end;
(inline_start, inline_size, available_inline_size - sum, margin_end)
}
(MaybeAuto::Specified(margin_start), MaybeAuto::Specified(margin_end)) => {
                        // Values are over-constrained. `sum` collects every term except
                        // 'inline-start', so that 'inline-start' can be re-solved from it
                        // below when it is the value to ignore.
                        let sum = inline_end + inline_size + margin_start + margin_end;
if parent_has_same_direction {
// Ignore value for 'inline-end'
(inline_start, inline_size, margin_start, margin_end)
} else {
// Ignore value for 'inline-start'
(available_inline_size - sum,
inline_size,
margin_start,
margin_end)
}
}
}
}
// For the rest of the cases, auto values for margin are set to 0
// If only one is Auto, solve for it
(MaybeAuto::Auto,
MaybeAuto::Specified(inline_end),
MaybeAuto::Specified(inline_size)) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
let sum = inline_end + inline_size + margin_start + margin_end;
(available_inline_size - sum, inline_size, margin_start, margin_end)
}
(MaybeAuto::Specified(inline_start),
MaybeAuto::Auto,
MaybeAuto::Specified(inline_size)) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
(inline_start, inline_size, margin_start, margin_end)
}
(MaybeAuto::Specified(inline_start),
MaybeAuto::Specified(inline_end),
MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
let sum = inline_start + inline_end + margin_start + margin_end;
(inline_start, available_inline_size - sum, margin_start, margin_end)
}
// If inline-size is auto, then inline-size is shrink-to-fit. Solve for the
// non-auto value.
(MaybeAuto::Specified(inline_start), MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
// Set inline-end to zero to calculate inline-size
let inline_size =
block.get_shrink_to_fit_inline_size(available_inline_size -
(margin_start + margin_end));
(inline_start, inline_size, margin_start, margin_end)
}
(MaybeAuto::Auto, MaybeAuto::Specified(inline_end), MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
// Set inline-start to zero to calculate inline-size
let inline_size =
block.get_shrink_to_fit_inline_size(available_inline_size -
(margin_start + margin_end));
let sum = inline_end + inline_size + margin_start + margin_end;
(available_inline_size - sum, inline_size, margin_start, margin_end)
}
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(inline_size)) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
// Setting 'inline-start' to static position because direction is 'ltr'.
// TODO: Handle 'rtl' when it is implemented.
(Au(0), inline_size, margin_start, margin_end)
}
};
ISizeConstraintSolution::for_absolute_flow(inline_start,
inline_size,
margin_inline_start,
margin_inline_end)
}
fn containing_block_inline_size(&self,
block: &mut BlockFlow,
_: Au,
layout_context: &LayoutContext)
-> Au {
let opaque_block = OpaqueFlow::from_flow(block);
block.containing_block_size(&layout_context.shared.screen_size, opaque_block).inline
}
fn set_inline_position_of_flow_if_necessary(&self,
block: &mut BlockFlow,
solution: ISizeConstraintSolution) {
        // Set the inline position of the absolute flow with respect to its containing block.
if !block.base.flags.contains(INLINE_POSITION_IS_STATIC) {
block.base.position.start.i = solution.inline_start;
}
}
}
impl ISizeAndMarginsComputer for AbsoluteReplaced {
/// Solve the horizontal constraint equation for absolute replaced elements.
///
/// CSS Section 10.3.8
/// Constraint equation:
/// inline-start + inline-end + inline-size + margin-inline-start + margin-inline-end
/// = absolute containing block inline-size - (horizontal padding and border)
/// [aka available_inline-size]
///
/// Return the solution for the equation.
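    ///
    /// Unlike the non-replaced case, the inline-size here is always the used replaced
    /// size computed by `initial_computed_inline_size`; only the offsets and margins
    /// remain to be solved. For example (numbers invented for illustration): a 60px-wide
    /// replaced fragment with `inline-start: 10px` and everything else `auto` keeps its
    /// 60px inline-size and sits 10px from the inline-start edge.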
fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let &ISizeConstraintInput {
computed_inline_size,
inline_start_margin,
inline_end_margin,
inline_start,
inline_end,
available_inline_size,
..
} = input;
// TODO: Check for direction of static-position Containing Block (aka
// parent flow, _not_ the actual Containing Block) when right-to-left
// is implemented
// Assume direction is 'ltr' for now
// TODO: Handle all the cases for 'rtl' direction.
        let inline_size = match computed_inline_size {
            MaybeAuto::Specified(w) => w,
            _ => panic!("The used value for inline_size for absolute replaced flow \
                         should have already been calculated by now."),
        };
let (inline_start, inline_size, margin_inline_start, margin_inline_end) =
match (inline_start, inline_end) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
(Au(0), inline_size, margin_start, margin_end)
}
// If only one is Auto, solve for it
(MaybeAuto::Auto, MaybeAuto::Specified(inline_end)) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
let sum = inline_end + inline_size + margin_start + margin_end;
(available_inline_size - sum, inline_size, margin_start, margin_end)
}
(MaybeAuto::Specified(inline_start), MaybeAuto::Auto) => {
let margin_start = inline_start_margin.specified_or_zero();
let margin_end = inline_end_margin.specified_or_zero();
(inline_start, inline_size, margin_start, margin_end)
}
(MaybeAuto::Specified(inline_start), MaybeAuto::Specified(inline_end)) => {
match (inline_start_margin, inline_end_margin) {
(MaybeAuto::Auto, MaybeAuto::Auto) => {
let total_margin_val = available_inline_size - inline_start -
inline_end - inline_size;
if total_margin_val < Au(0) {
// margin-inline-start becomes 0 because direction is 'ltr'.
(inline_start, inline_size, Au(0), total_margin_val)
} else {
// Equal margins
(inline_start,
inline_size,
total_margin_val.scale_by(0.5),
total_margin_val.scale_by(0.5))
}
}
(MaybeAuto::Specified(margin_start), MaybeAuto::Auto) => {
let sum = inline_start + inline_end + inline_size + margin_start;
(inline_start, inline_size, margin_start, available_inline_size - sum)
}
(MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => {
let sum = inline_start + inline_end + inline_size + margin_end;
(inline_start, inline_size, available_inline_size - sum, margin_end)
}
(MaybeAuto::Specified(margin_start), MaybeAuto::Specified(margin_end)) => {
                            // Values are over-constrained.
                            // Ignore the value for 'inline-end' because direction is 'ltr'.
(inline_start, inline_size, margin_start, margin_end)
}
}
}
};
ISizeConstraintSolution::for_absolute_flow(inline_start,
inline_size,
margin_inline_start,
margin_inline_end)
}
/// Calculate used value of inline-size just like we do for inline replaced elements.
fn initial_computed_inline_size(&self,
block: &mut BlockFlow,
_: Au,
layout_context: &LayoutContext)
-> MaybeAuto {
let opaque_block = OpaqueFlow::from_flow(block);
let containing_block_inline_size =
block.containing_block_size(&layout_context.shared.screen_size, opaque_block).inline;
let fragment = block.fragment();
fragment.assign_replaced_inline_size_if_necessary(containing_block_inline_size);
// For replaced absolute flow, the rest of the constraint solving will
// take inline-size to be specified as the value computed here.
MaybeAuto::Specified(fragment.content_inline_size())
}
fn containing_block_inline_size(&self,
block: &mut BlockFlow,
_: Au,
layout_context: &LayoutContext)
-> Au {
let opaque_block = OpaqueFlow::from_flow(block);
block.containing_block_size(&layout_context.shared.screen_size, opaque_block).inline
}
fn set_inline_position_of_flow_if_necessary(&self,
block: &mut BlockFlow,
solution: ISizeConstraintSolution) {
        // Set the inline position of the absolute flow with respect to its containing block.
block.base.position.start.i = solution.inline_start;
}
}
impl ISizeAndMarginsComputer for BlockNonReplaced {
/// Compute inline-start and inline-end margins and inline-size.
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
self.solve_block_inline_size_constraints(block, input)
}
}
impl ISizeAndMarginsComputer for BlockReplaced {
/// Compute inline-start and inline-end margins and inline-size.
///
/// ISize has already been calculated. We now calculate the margins just
/// like for non-replaced blocks.
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
match input.computed_inline_size {
MaybeAuto::Specified(_) => {},
MaybeAuto::Auto => {
panic!("BlockReplaced: inline_size should have been computed by now")
}
};
self.solve_block_inline_size_constraints(block, input)
}
/// Calculate used value of inline-size just like we do for inline replaced elements.
fn initial_computed_inline_size(&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
_: &LayoutContext)
-> MaybeAuto {
let fragment = block.fragment();
fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size);
// For replaced block flow, the rest of the constraint solving will
// take inline-size to be specified as the value computed here.
MaybeAuto::Specified(fragment.content_inline_size())
}
}
impl ISizeAndMarginsComputer for FloatNonReplaced {
/// CSS Section 10.3.5
///
/// If inline-size is computed as 'auto', the used value is the 'shrink-to-fit' inline-size.
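    ///
    /// For example (numbers invented for illustration): a float whose content needs at
    /// least 40px and prefers 70px, placed in 100px of available inline-size, shrinks to
    /// fit at 70px rather than filling the full 100px.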
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let (computed_inline_size, inline_start_margin, inline_end_margin, available_inline_size) =
(input.computed_inline_size,
input.inline_start_margin,
input.inline_end_margin,
input.available_inline_size);
let margin_inline_start = inline_start_margin.specified_or_zero();
let margin_inline_end = inline_end_margin.specified_or_zero();
let available_inline_size_float = available_inline_size - margin_inline_start -
margin_inline_end;
let shrink_to_fit = block.get_shrink_to_fit_inline_size(available_inline_size_float);
let inline_size = computed_inline_size.specified_or_default(shrink_to_fit);
debug!("assign_inline_sizes_float -- inline_size: {:?}", inline_size);
ISizeConstraintSolution::new(inline_size, margin_inline_start, margin_inline_end)
}
}
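// For reference, the shrink-to-fit rule from CSS 2.1 § 10.3.5 that
// `get_shrink_to_fit_inline_size` is expected to implement is:
//
//     shrink-to-fit = min(max(preferred minimum, available), preferred)
//
// so the float never overflows the inline space left after its margins,
// but also never grows past its preferred intrinsic inline-size.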
impl ISizeAndMarginsComputer for FloatReplaced {
/// CSS Section 10.3.5
///
/// If inline-size is computed as 'auto', the used value is the 'shrink-to-fit' inline-size.
fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let (computed_inline_size, inline_start_margin, inline_end_margin) =
(input.computed_inline_size, input.inline_start_margin, input.inline_end_margin);
let margin_inline_start = inline_start_margin.specified_or_zero();
let margin_inline_end = inline_end_margin.specified_or_zero();
let inline_size = match computed_inline_size {
MaybeAuto::Specified(w) => w,
MaybeAuto::Auto => panic!("FloatReplaced: inline_size should have been computed by now")
};
debug!("assign_inline_sizes_float -- inline_size: {:?}", inline_size);
ISizeConstraintSolution::new(inline_size, margin_inline_start, margin_inline_end)
}
/// Calculate used value of inline-size just like we do for inline replaced elements.
fn initial_computed_inline_size(&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
_: &LayoutContext)
-> MaybeAuto {
let fragment = block.fragment();
fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size);
// For replaced block flow, the rest of the constraint solving will
// take inline-size to be specified as the value computed here.
MaybeAuto::Specified(fragment.content_inline_size())
}
}
impl ISizeAndMarginsComputer for InlineBlockNonReplaced {
/// Compute inline-start and inline-end margins and inline-size.
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
let (computed_inline_size,
inline_start_margin,
inline_end_margin,
available_inline_size) =
(input.computed_inline_size,
input.inline_start_margin,
input.inline_end_margin,
input.available_inline_size);
// For inline-blocks, `auto` margins compute to 0.
let inline_start_margin = inline_start_margin.specified_or_zero();
let inline_end_margin = inline_end_margin.specified_or_zero();
// If inline-size is set to 'auto', and this is an inline block, use the
// shrink to fit algorithm (see CSS 2.1 § 10.3.9)
let inline_size = match computed_inline_size {
MaybeAuto::Auto => {
block.get_shrink_to_fit_inline_size(available_inline_size - (inline_start_margin +
inline_end_margin))
}
MaybeAuto::Specified(inline_size) => inline_size,
};
ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin)
}
}
impl ISizeAndMarginsComputer for InlineBlockReplaced {
/// Compute inline-start and inline-end margins and inline-size.
///
/// ISize has already been calculated. We now calculate the margins just
/// like for non-replaced blocks.
fn solve_inline_size_constraints(&self,
block: &mut BlockFlow,
input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
debug_assert!(match input.computed_inline_size {
MaybeAuto::Specified(_) => true,
MaybeAuto::Auto => false,
});
let (computed_inline_size,
inline_start_margin,
inline_end_margin,
available_inline_size) =
(input.computed_inline_size,
input.inline_start_margin,
input.inline_end_margin,
input.available_inline_size);
// For inline-blocks, `auto` margins compute to 0.
let inline_start_margin = inline_start_margin.specified_or_zero();
let inline_end_margin = inline_end_margin.specified_or_zero();
// If inline-size is set to 'auto', and this is an inline block, use the
// shrink to fit algorithm (see CSS 2.1 § 10.3.9)
let inline_size = match computed_inline_size {
MaybeAuto::Auto => {
block.get_shrink_to_fit_inline_size(available_inline_size - (inline_start_margin +
inline_end_margin))
}
MaybeAuto::Specified(inline_size) => inline_size,
};
ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin)
}
/// Calculate used value of inline-size just like we do for inline replaced elements.
fn initial_computed_inline_size(&self,
block: &mut BlockFlow,
parent_flow_inline_size: Au,
_: &LayoutContext)
-> MaybeAuto {
let fragment = block.fragment();
fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size);
// For replaced block flow, the rest of the constraint solving will
// take inline-size to be specified as the value computed here.
MaybeAuto::Specified(fragment.content_inline_size())
}
}<|fim▁end|> |
(MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(block_size)) => {
let margin_block_start = block_start_margin.specified_or_zero(); |
<|file_name|>issue-81218.rs<|end_file_name|><|fim▁begin|>// Regression test for #81218
//
// check-pass
<|fim▁hole|>#![forbid(warnings)]
#[allow(unused_variables)]
fn main() {
// We want to ensure that you don't get an error
// here. The idea is that a derive might generate
// code that would otherwise trigger the "unused variables"
// lint, but it is meant to be suppressed.
let x: ();
}<|fim▁end|> | |
<|file_name|>getCtor.js<|end_file_name|><|fim▁begin|>'use strict';
// MODULES //
var ctor = require( './ctor.js' );
// VARIABLES //
var CACHE = require( './cache.js' ).CTORS;
// GET CTOR //
/**
* FUNCTION: getCtor( dtype, ndims )<|fim▁hole|>*
* @param {String} dtype - underlying ndarray data type
* @param {Number} ndims - view dimensions
* @returns {Function} ndarray constructor
*/
function getCtor( dtype, ndims ) {
var ctors,
len,
i;
ctors = CACHE[ dtype ];
len = ctors.length;
// If the constructor has not already been created, use the opportunity to create it, as well as any lower dimensional constructors of the same data type....
for ( i = len+1; i <= ndims; i++ ) {
ctors.push( ctor( dtype, i ) );
}
return ctors[ ndims-1 ];
} // end FUNCTION getCtor()
// EXPORTS //
module.exports = getCtor;<|fim▁end|> | * Returns an ndarray constructor. |
<|file_name|>pys_define.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
PYS_SERVICE_MOD_PRE='pys_' # prefix for service module names<|fim▁hole|><|fim▁end|> | PYS_HEAD_LEN=12 # message header length in bytes
PYS_MAX_BODY_LEN=10485760 # maximum message body length (10 MiB) |
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>//! Definitions of a bunch of iterators for `[T]`.
#[macro_use] // import iterator! and forward_iterator!
mod macros;
use crate::cmp;
use crate::cmp::Ordering;
use crate::fmt;
use crate::intrinsics::{assume, exact_div, unchecked_sub};
use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce};
use crate::marker::{PhantomData, Send, Sized, Sync};
use crate::mem;
use crate::num::NonZeroUsize;
use crate::ptr::NonNull;
use super::{from_raw_parts, from_raw_parts_mut};
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a [T] {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.iter()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a mut [T] {
type Item = &'a mut T;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> IterMut<'a, T> {
self.iter_mut()
}
}
// Macro helper functions
#[inline(always)]
fn size_from_ptr<T>(_: *const T) -> usize {
mem::size_of::<T>()
}
/// Immutable slice iterator
///
/// This struct is created by the [`iter`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter` method to get the `Iter` struct (`&[usize]` here):
/// let slice = &[1, 2, 3];
///
/// // Then, we iterate over it:
/// for element in slice.iter() {
/// println!("{}", element);
/// }
/// ```
///
/// [`iter`]: slice::iter
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
ptr: NonNull<T>,
end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
_marker: PhantomData<&'a T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Iter").field(&self.as_slice()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Send for Iter<'_, T> {}
impl<'a, T> Iter<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T]) -> Self {
let ptr = slice.as_ptr();
// SAFETY: Similar to `IterMut::new`.
unsafe {
assume(!ptr.is_null());
let end = if mem::size_of::<T>() == 0 {
(ptr as *const u8).wrapping_add(slice.len()) as *const T
} else {
ptr.add(slice.len())
};
Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
}
}
/// Views the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter` method to get the `Iter`
/// // struct (`&[usize]` here):
/// let slice = &[1, 2, 3];
///
/// // Then, we get the iterator:
/// let mut iter = slice.iter();
/// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]":
/// println!("{:?}", iter.as_slice());
///
/// // Next, we move to the second element of the slice:
/// iter.next();
/// // Now `as_slice` returns "[2, 3]":
/// println!("{:?}", iter.as_slice());
/// ```
#[stable(feature = "iter_to_slice", since = "1.4.0")]
pub fn as_slice(&self) -> &'a [T] {
self.make_slice()
}
}
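// For a zero-sized `T`, `end` encodes `ptr + len` in raw address space, so
// `ptr == end` still detects exhaustion even though advancing a ZST pointer
// by `size_of::<T>()` would not move it. A quick illustration (`()` is a ZST):
//
//     let zst = [(), (), ()];
//     assert_eq!(zst.iter().count(), 3);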
iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
fn is_sorted_by<F>(self, mut compare: F) -> bool
where
Self: Sized,
F: FnMut(&Self::Item, &Self::Item) -> Option<Ordering>,
{
self.as_slice().windows(2).all(|w| {
compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false)
})
}
}}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
fn clone(&self) -> Self {
Iter { ptr: self.ptr, end: self.end, _marker: self._marker }
}
}
#[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
impl<T> AsRef<[T]> for Iter<'_, T> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
/// Mutable slice iterator.
///
/// This struct is created by the [`iter_mut`] method on [slices].
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter_mut` method to get the `IterMut`
/// // struct (`&[usize]` here):
/// let mut slice = &mut [1, 2, 3];
///
/// // Then, we iterate over it and increment each element value:
/// for element in slice.iter_mut() {
/// *element += 1;
/// }
///
/// // We now have "[2, 3, 4]":
/// println!("{:?}", slice);
/// ```
///
/// [`iter_mut`]: slice::iter_mut
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> {
ptr: NonNull<T>,
end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
// ptr == end is a quick test for the Iterator being empty, that works
// for both ZST and non-ZST.
_marker: PhantomData<&'a mut T>,
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("IterMut").field(&self.make_slice()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send> Send for IterMut<'_, T> {}
impl<'a, T> IterMut<'a, T> {
#[inline]
pub(super) fn new(slice: &'a mut [T]) -> Self {
let ptr = slice.as_mut_ptr();
// SAFETY: There are several things here:
//
// `ptr` has been obtained by `slice.as_ptr()` where `slice` is a valid
// reference, and is thus non-null and safe to use and pass to
// `NonNull::new_unchecked`.
//
// Adding `slice.len()` to the starting pointer gives a pointer
// at the end of `slice`. `end` will never be dereferenced, only checked
// for direct pointer equality with `ptr` to check if the iterator is
// done.
//
// In the case of a ZST, the end pointer is just the start pointer plus
// the length, which also allows for the fast `ptr == end` check.
//
// See the `next_unchecked!` and `is_empty!` macros as well as the
// `post_inc_start` method for more information.
unsafe {
assume(!ptr.is_null());
let end = if mem::size_of::<T>() == 0 {
(ptr as *mut u8).wrapping_add(slice.len()) as *mut T
} else {
ptr.add(slice.len())
};
Self { ptr: NonNull::new_unchecked(ptr), end, _marker: PhantomData }
}
}
/// Views the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut` references that alias, this is forced
/// to consume the iterator.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// // First, we declare a type which has the `iter_mut` method to get the `IterMut`
/// // struct (`&[usize]` here):
/// let mut slice = &mut [1, 2, 3];
///
/// {
/// // Then, we get the iterator:
/// let mut iter = slice.iter_mut();
/// // We move to next element:
/// iter.next();
/// // So if we print what `into_slice` method returns here, we have "[2, 3]":
/// println!("{:?}", iter.into_slice());
/// }
///
/// // Now let's modify a value of the slice:
/// {
/// // First we get back the iterator:
/// let mut iter = slice.iter_mut();
/// // We change the value of the first element of the slice returned by the `next` method:
/// *iter.next().unwrap() += 1;
/// }
/// // Now slice is "[2, 2, 3]":
/// println!("{:?}", slice);
/// ```
#[stable(feature = "iter_to_slice", since = "1.4.0")]
pub fn into_slice(self) -> &'a mut [T] {
// SAFETY: the iterator was created from a mutable slice with pointer
// `self.ptr` and length `len!(self)`. This guarantees that all the prerequisites
// for `from_raw_parts_mut` are fulfilled.
unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }
}
/// Views the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut [T]` references that alias, the returned slice
/// borrows its lifetime from the iterator the method is applied on.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut slice: &mut [usize] = &mut [1, 2, 3];
///
/// // First, we get the iterator:
/// let mut iter = slice.iter_mut();
/// // So if we check what the `as_slice` method returns here, we have "[1, 2, 3]":
/// assert_eq!(iter.as_slice(), &[1, 2, 3]);
///
/// // Next, we move to the second element of the slice:
/// iter.next();
/// // Now `as_slice` returns "[2, 3]":
/// assert_eq!(iter.as_slice(), &[2, 3]);
/// ```
#[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")]
pub fn as_slice(&self) -> &[T] {
self.make_slice()
}
}
#[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")]
impl<T> AsRef<[T]> for IterMut<'_, T> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
iterator! {struct IterMut -> *mut T, &'a mut T, mut, {mut}, {}}
/// An internal abstraction over the splitting iterators, so that
/// splitn, splitn_mut etc can be implemented once.
#[doc(hidden)]
pub(super) trait SplitIter: DoubleEndedIterator {
/// Marks the underlying iterator as complete, extracting the remaining
/// portion of the slice.
fn finish(&mut self) -> Option<Self::Item>;
}
/// An iterator over subslices separated by elements that match a predicate
/// function.
///
/// This struct is created by the [`split`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split(|num| num % 3 == 0);
/// ```
///
/// [`split`]: slice::split
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
// Used for `SplitWhitespace` and `SplitAsciiWhitespace` `as_str` methods
pub(crate) v: &'a [T],
pred: P,
// Used for `SplitAsciiWhitespace` `as_str` method
pub(crate) finished: bool,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> Split<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a [T], pred: P) -> Self {
Self { v: slice, pred, finished: false }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for Split<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Split").field("v", &self.v).field("finished", &self.finished).finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, P> Clone for Split<'_, T, P>
where
P: Clone + FnMut(&T) -> bool,
{
fn clone(&self) -> Self {
Split { v: self.v, pred: self.pred.clone(), finished: self.finished }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
match self.v.iter().position(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[..idx]);
self.v = &self.v[idx + 1..];
ret
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// If the predicate doesn't match anything, we yield one slice.
// If it matches every element, we yield `len() + 1` empty slices.
(1, Some(self.v.len() + 1))
}
}
}
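// A quick illustration of the `size_hint` bounds above: a predicate that
// matches every element yields `len() + 1` empty subslices, while one that
// matches nothing yields a single subslice:
//
//     let v = [1, 1, 1];
//     assert_eq!(v.split(|n| *n == 1).count(), 4); // upper bound: 3 + 1
//     assert_eq!(v.split(|n| *n == 9).count(), 1); // lower bound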
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[idx + 1..]);
self.v = &self.v[..idx];
ret
}
}
}
}
impl<'a, T, P> SplitIter for Split<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a [T]> {
if self.finished {
None
} else {
self.finished = true;
Some(self.v)
}
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function. Unlike `Split`, it contains the matched part as a terminator
/// of the subslice.
///
/// This struct is created by the [`split_inclusive`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
/// ```
///
/// [`split_inclusive`]: slice::split_inclusive
/// [slices]: slice
#[stable(feature = "split_inclusive", since = "1.51.0")]
pub struct SplitInclusive<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a [T],
pred: P,
finished: bool,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitInclusive<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a [T], pred: P) -> Self {
Self { v: slice, pred, finished: false }
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusive")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<T, P> Clone for SplitInclusive<'_, T, P>
where
P: Clone + FnMut(&T) -> bool,
{
fn clone(&self) -> Self {
SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
let idx =
self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let ret = Some(&self.v[..idx]);
self.v = &self.v[idx..];
ret
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// If the predicate doesn't match anything, we yield one slice.
// If it matches every element, we yield `len()` one-element slices,
// or a single empty slice.
(1, Some(cmp::max(1, self.v.len())))
}
}
}
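// By contrast with `Split`, a predicate that matches every element here
// yields `len()` one-element subslices, each ending with its separator:
//
//     let v = [1, 1, 1];
//     assert_eq!(v.split_inclusive(|n| *n == 1).count(), 3);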
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching a new match
// one index to the left.
let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] };
let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let ret = Some(&self.v[idx..]);
self.v = &self.v[..idx];
ret
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the vector which are separated
/// by elements that match `pred`.
///
/// This struct is created by the [`split_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut v = [10, 40, 30, 20, 60, 50];
/// let iter = v.split_mut(|num| *num % 3 == 0);
/// ```
///
/// [`split_mut`]: slice::split_mut
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a mut [T],
pred: P,
finished: bool,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitMut<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a mut [T], pred: P) -> Self {
Self { v: slice, pred, finished: false }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitMut").field("v", &self.v).field("finished", &self.finished).finish()
}
}
impl<'a, T, P> SplitIter for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a mut [T]> {
if self.finished {
None
} else {
self.finished = true;
Some(mem::replace(&mut self.v, &mut []))
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> Iterator for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = &mut tail[1..];
Some(head)
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// If the predicate doesn't match anything, we yield one slice.
// If it matches every element, we yield `len() + 1` empty slices.
(1, Some(self.v.len() + 1))
}
}
}
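// The `mem::replace(&mut self.v, &mut [])` dance above is the standard way
// to move a `&'a mut [T]` out from behind `&mut self` so that it can be
// split without aliasing. A minimal sketch of the same pattern:
//
//     fn take_head<'a, T>(v: &mut &'a mut [T], n: usize) -> &'a mut [T] {
//         let tmp = core::mem::replace(v, &mut []);
//         let (head, tail) = tmp.split_at_mut(n);
//         *v = tail;
//         head
//     }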
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().rposition(|x| (*pred)(x))
};
match idx_opt {
None => self.finish(),
Some(idx) => {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(&mut tail[1..])
}
}
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the vector which are separated
/// by elements that match `pred`. Unlike `SplitMut`, it contains the matched
/// parts in the ends of the subslices.
///
/// This struct is created by the [`split_inclusive_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut v = [10, 40, 30, 20, 60, 50];
/// let iter = v.split_inclusive_mut(|num| *num % 3 == 0);
/// ```
///
/// [`split_inclusive_mut`]: slice::split_inclusive_mut
/// [slices]: slice
#[stable(feature = "split_inclusive", since = "1.51.0")]
pub struct SplitInclusiveMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a mut [T],
pred: P,
finished: bool,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitInclusiveMut<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a mut [T], pred: P) -> Self {
Self { v: slice, pred, finished: false }
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusiveMut")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = tail;
Some(head)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// If the predicate doesn't match anything, we yield one slice.
// If it matches every element, we yield `len()` one-element slices,
// or a single empty slice.
(1, Some(cmp::max(1, self.v.len())))
}
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = if self.v.is_empty() {
None
} else {
// work around borrowck limitations
let pred = &mut self.pred;
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching a new match
// one index to the left.
let remainder = &self.v[..(self.v.len() - 1)];
remainder.iter().rposition(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(tail)
}
}
#[stable(feature = "split_inclusive", since = "1.51.0")]
impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
/// function, starting from the end of the slice.
///
/// This struct is created by the [`rsplit`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = [11, 22, 33, 0, 44, 55];
/// let iter = slice.rsplit(|num| *num == 0);
/// ```
///
/// [`rsplit`]: slice::rsplit
/// [slices]: slice
#[stable(feature = "slice_rsplit", since = "1.27.0")]
#[derive(Clone)] // Is this correct, or does it incorrectly require `T: Clone`?
pub struct RSplit<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: Split<'a, T, P>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> RSplit<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a [T], pred: P) -> Self {
Self { inner: Split::new(slice, pred) }
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplit<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplit")
.field("v", &self.inner.v)
.field("finished", &self.inner.finished)
.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
self.inner.next_back()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
self.inner.next()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplit<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a [T]> {
self.inner.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplit<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`, starting from the end of the slice.
///
/// This struct is created by the [`rsplit_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = [11, 22, 33, 0, 44, 55];
/// let iter = slice.rsplit_mut(|num| *num == 0);
/// ```
///
/// [`rsplit_mut`]: slice::rsplit_mut
/// [slices]: slice
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub struct RSplitMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: SplitMut<'a, T, P>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> RSplitMut<'a, T, P> {
#[inline]
pub(super) fn new(slice: &'a mut [T], pred: P) -> Self {
Self { inner: SplitMut::new(slice, pred) }
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitMut")
.field("v", &self.inner.v)
.field("finished", &self.inner.finished)
.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> SplitIter for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn finish(&mut self) -> Option<&'a mut [T]> {
self.inner.finish()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> Iterator for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
self.inner.next_back()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<'a, T, P> DoubleEndedIterator for RSplitMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
self.inner.next()
}
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
impl<T, P> FusedIterator for RSplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// A private iterator over subslices separated by elements that
/// match a predicate function, splitting at most a fixed number of
/// times.
#[derive(Debug)]
struct GenericSplitN<I> {
iter: I,
count: usize,
}
impl<T, I: SplitIter<Item = T>> Iterator for GenericSplitN<I> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
match self.count {
0 => None,
1 => {
self.count -= 1;
self.iter.finish()
}
_ => {
self.count -= 1;
self.iter.next()
}
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, upper_opt) = self.iter.size_hint();
(
cmp::min(self.count, lower),
Some(upper_opt.map_or(self.count, |upper| cmp::min(self.count, upper))),
)
}
}
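// A quick illustration of the count handling above: `splitn(2, ..)` splits
// at the first separator only, then `finish` yields the untouched rest:
//
//     let v = [10, 40, 30, 20, 60, 50];
//     let parts: Vec<_> = v.splitn(2, |n| n % 3 == 0).collect();
//     assert_eq!(parts, [&[10, 40][..], &[20, 60, 50][..]]);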
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
///
/// This struct is created by the [`splitn`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = [10, 40, 30, 20, 60, 50];
/// let iter = slice.splitn(2, |num| *num % 3 == 0);
/// ```
///
/// [`splitn`]: slice::splitn
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitN<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<Split<'a, T, P>>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitN<'a, T, P> {
#[inline]
pub(super) fn new(s: Split<'a, T, P>, n: usize) -> Self {
Self { inner: GenericSplitN { iter: s, count: n } }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitN<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitN").field("inner", &self.inner).finish()
}
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = [10, 40, 30, 20, 60, 50];
/// let iter = slice.rsplitn(2, |num| *num % 3 == 0);
/// ```
///
/// [`rsplitn`]: slice::rsplitn
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitN<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<RSplit<'a, T, P>>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> RSplitN<'a, T, P> {
#[inline]
pub(super) fn new(s: RSplit<'a, T, P>, n: usize) -> Self {
Self { inner: GenericSplitN { iter: s, count: n } }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitN<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitN").field("inner", &self.inner).finish()
}
}
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
///
/// This struct is created by the [`splitn_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = [10, 40, 30, 20, 60, 50];
/// let iter = slice.splitn_mut(2, |num| *num % 3 == 0);
/// ```
///
/// [`splitn_mut`]: slice::splitn_mut
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitNMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<SplitMut<'a, T, P>>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitNMut<'a, T, P> {
#[inline]
pub(super) fn new(s: SplitMut<'a, T, P>, n: usize) -> Self {
Self { inner: GenericSplitN { iter: s, count: n } }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for SplitNMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitNMut").field("inner", &self.inner).finish()
}
}
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
///
/// This struct is created by the [`rsplitn_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = [10, 40, 30, 20, 60, 50];
/// let iter = slice.rsplitn_mut(2, |num| *num % 3 == 0);
/// ```
///
/// [`rsplitn_mut`]: slice::rsplitn_mut
/// [slices]: slice
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitNMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
inner: GenericSplitN<RSplitMut<'a, T, P>>,
}
impl<'a, T: 'a, P: FnMut(&T) -> bool> RSplitNMut<'a, T, P> {
#[inline]
pub(super) fn new(s: RSplitMut<'a, T, P>, n: usize) -> Self {
Self { inner: GenericSplitN { iter: s, count: n } }
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: fmt::Debug, P> fmt::Debug for RSplitNMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RSplitNMut").field("inner", &self.inner).finish()
}
}
forward_iterator! { SplitN: T, &'a [T] }
forward_iterator! { RSplitN: T, &'a [T] }
forward_iterator! { SplitNMut: T, &'a mut [T] }
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
///
/// This struct is created by the [`windows`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = ['r', 'u', 's', 't'];
/// let iter = slice.windows(2);
/// ```
///
/// [`windows`]: slice::windows
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Windows<'a, T: 'a> {
v: &'a [T],
size: NonZeroUsize,
}
impl<'a, T: 'a> Windows<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T], size: NonZeroUsize) -> Self {
Self { v: slice, size }
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Windows<'_, T> {
fn clone(&self) -> Self {
Windows { v: self.v, size: self.size }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Windows<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.size.get() > self.v.len() {
None
} else {
let ret = Some(&self.v[..self.size.get()]);
self.v = &self.v[1..];
ret
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.size.get() > self.v.len() {
(0, Some(0))
} else {
let size = self.v.len() - self.size.get() + 1;
(size, Some(size))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = self.size.get().overflowing_add(n);
if end > self.v.len() || overflow {
self.v = &[];
None
} else {
let nth = &self.v[n..end];
self.v = &self.v[n + 1..];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.size.get() > self.v.len() {
None
} else {
let start = self.v.len() - self.size.get();
Some(&self.v[start..])
}
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
// SAFETY: since the caller guarantees that `i` is in bounds,
// which means that `i` cannot overflow an `isize`, and the
// slice created by `from_raw_parts` is a subslice of `self.v`
// thus is guaranteed to be valid for the lifetime `'a` of `self.v`.
unsafe { from_raw_parts(self.v.as_ptr().add(idx), self.size.get()) }
}
}
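// A quick illustration of the `nth` fast path above, which skips `n`
// windows without yielding the intermediate ones:
//
//     let s = [1, 2, 3, 4];
//     let mut w = s.windows(2);
//     assert_eq!(w.nth(1), Some(&[2, 3][..]));
//     assert_eq!(w.next(), Some(&[3, 4][..]));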
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Windows<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.size.get() > self.v.len() {
None
} else {
let ret = Some(&self.v[self.v.len() - self.size.get()..]);
self.v = &self.v[..self.v.len() - 1];
ret
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = self.v.len().overflowing_sub(n);
if end < self.size.get() || overflow {
self.v = &[];
None
} else {
let ret = &self.v[end - self.size.get()..end];
self.v = &self.v[..end - 1];
Some(ret)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Windows<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Windows<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Windows<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for Windows<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for Windows<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`chunks`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.chunks(2);
/// ```
///
/// [`chunks`]: slice::chunks
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chunks<'a, T: 'a> {
v: &'a [T],
chunk_size: usize,
}
impl<'a, T: 'a> Chunks<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T], size: usize) -> Self {
Self { v: slice, chunk_size: size }
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Chunks<'_, T> {
fn clone(&self) -> Self {
Chunks { v: self.v, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Chunks<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let chunksz = cmp::min(self.v.len(), self.chunk_size);
let (fst, snd) = self.v.split_at(chunksz);
self.v = snd;
Some(fst)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &[];
None
} else {
let end = match start.checked_add(self.chunk_size) {
Some(sum) => cmp::min(self.v.len(), sum),
None => self.v.len(),
};
let nth = &self.v[start..end];
self.v = &self.v[end..];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
Some(&self.v[start..])
}
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let start = idx * self.chunk_size;
// SAFETY: the caller guarantees that `i` is in bounds,
// which means that `start` must be in bounds of the
// underlying `self.v` slice, and we made sure that `len`
// is also in bounds of `self.v`. Thus, `start` cannot overflow
// an `isize`, and the slice constructed by `from_raw_parts`
// is a subslice of `self.v` which is guaranteed to be valid
// for the lifetime `'a` of `self.v`.
unsafe {
let len = cmp::min(self.v.len().unchecked_sub(start), self.chunk_size);
from_raw_parts(self.v.as_ptr().add(start), len)
}
}
}
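// A quick illustration of `last` above, which jumps straight to the final
// (possibly short) chunk without walking the iterator:
//
//     let s = ['l', 'o', 'r', 'e', 'm'];
//     assert_eq!(s.chunks(2).last(), Some(&['m'][..]));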
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
self.v = fst;
Some(snd)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
Some(res) => cmp::min(self.v.len(), res),
None => self.v.len(),
};
let nth_back = &self.v[start..end];
self.v = &self.v[..start];
Some(nth_back)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Chunks<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Chunks<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Chunks<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for Chunks<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for Chunks<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`chunks_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.chunks_mut(2);
/// ```
///
/// [`chunks_mut`]: slice::chunks_mut
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ChunksMut<'a, T: 'a> {
v: &'a mut [T],
chunk_size: usize,
}
impl<'a, T: 'a> ChunksMut<'a, T> {
#[inline]
pub(super) fn new(slice: &'a mut [T], size: usize) -> Self {
Self { v: slice, chunk_size: size }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for ChunksMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let sz = cmp::min(self.v.len(), self.chunk_size);
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(sz);
self.v = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let end = match start.checked_add(self.chunk_size) {
Some(sum) => cmp::min(self.v.len(), sum),
None => self.v.len(),
};
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(end);
let (_, nth) = head.split_at_mut(start);
self.v = tail;
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let start = (self.v.len() - 1) / self.chunk_size * self.chunk_size;
Some(&mut self.v[start..])
}
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let start = idx * self.chunk_size;
// SAFETY: see comments for `Chunks::__iterator_get_unchecked`.
//
// Also note that the caller also guarantees that we're never called
// with the same index again, and that no other methods that will
// access this subslice are called, so it is valid for the returned
// slice to be mutable.
unsafe {
let len = cmp::min(self.v.len().unchecked_sub(start), self.chunk_size);
from_raw_parts_mut(self.v.as_mut_ptr().add(start), len)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let sz = if remainder != 0 { remainder } else { self.chunk_size };
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - sz);
self.v = head;
Some(tail)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = match start.checked_add(self.chunk_size) {
Some(res) => cmp::min(self.v.len(), res),
None => self.v.len(),
};
let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (head, nth_back) = temp.split_at_mut(start);
self.v = head;
Some(nth_back)
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for ChunksMut<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksMut<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for ChunksMut<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksMut<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for ChunksMut<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `chunk_size-1` elements will be omitted but can be retrieved via
/// the [`remainder`] function on the iterator.
///
/// This struct is created by the [`chunks_exact`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.chunks_exact(2);
/// ```
///
/// [`chunks_exact`]: slice::chunks_exact
/// [`remainder`]: ChunksExact::remainder
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExact<'a, T: 'a> {
v: &'a [T],
rem: &'a [T],
chunk_size: usize,
}
impl<'a, T> ChunksExact<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T], chunk_size: usize) -> Self {
let rem = slice.len() % chunk_size;
let fst_len = slice.len() - rem;
// SAFETY: 0 <= fst_len <= slice.len() by construction above
let (fst, snd) = unsafe { slice.split_at_unchecked(fst_len) };
Self { v: fst, rem: snd, chunk_size }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub fn remainder(&self) -> &'a [T] {
self.rem
}
}
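// A quick illustration of the prefix/remainder split computed in `new`:
//
//     let s = ['l', 'o', 'r', 'e', 'm'];
//     let mut it = s.chunks_exact(2);
//     assert_eq!(it.next(), Some(&['l', 'o'][..]));
//     assert_eq!(it.next(), Some(&['r', 'e'][..]));
//     assert_eq!(it.next(), None);
//     assert_eq!(it.remainder(), &['m'][..]);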
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> Clone for ChunksExact<'_, T> {
fn clone(&self) -> Self {
ChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]<|fim▁hole|> fn next(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.chunk_size);
self.v = snd;
Some(fst)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &[];
None
} else {
let (_, snd) = self.v.split_at(start);
self.v = snd;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let start = idx * self.chunk_size;
// SAFETY: mostly identical to `Chunks::__iterator_get_unchecked`.
unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) }
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
self.v = fst;
Some(snd)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = start + self.chunk_size;
let nth_back = &self.v[start..end];
self.v = &self.v[..start];
Some(nth_back)
}
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExact<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExact<'_, T> {}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExact<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExact<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for ChunksExact<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last up to
/// `chunk_size-1` elements will be omitted but can be retrieved via the
/// [`into_remainder`] function on the iterator.
///
/// This struct is created by the [`chunks_exact_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.chunks_exact_mut(2);
/// ```
///
/// [`chunks_exact_mut`]: slice::chunks_exact_mut
/// [`into_remainder`]: ChunksExactMut::into_remainder
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub struct ChunksExactMut<'a, T: 'a> {
v: &'a mut [T],
rem: &'a mut [T],
chunk_size: usize,
}
impl<'a, T> ChunksExactMut<'a, T> {
#[inline]
pub(super) fn new(slice: &'a mut [T], chunk_size: usize) -> Self {
let rem = slice.len() % chunk_size;
let fst_len = slice.len() - rem;
// SAFETY: 0 <= fst_len <= slice.len() by construction above
let (fst, snd) = unsafe { slice.split_at_mut_unchecked(fst_len) };
Self { v: fst, rem: snd, chunk_size }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub fn into_remainder(self) -> &'a mut [T] {
self.rem
}
}
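// A quick illustration: the exact chunks can be mutated during iteration,
// and the leftover tail is still reachable through `into_remainder`:
//
//     let mut v = [0, 1, 2, 3, 4];
//     let mut it = v.chunks_exact_mut(2);
//     for chunk in &mut it { chunk[0] += 10; }
//     it.into_remainder()[0] = 99;
//     assert_eq!(v, [10, 1, 12, 3, 99]);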
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> Iterator for ChunksExactMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(self.chunk_size);
self.v = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (start, overflow) = n.overflowing_mul(self.chunk_size);
if start >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (_, snd) = tmp.split_at_mut(start);
self.v = snd;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let start = idx * self.chunk_size;
// SAFETY: see comments for `ChunksMut::__iterator_get_unchecked`.
unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) }
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for ChunksExactMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
self.v = head;
Some(tail)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
let start = (len - 1 - n) * self.chunk_size;
let end = start + self.chunk_size;
let (temp, _tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (head, nth_back) = temp.split_at_mut(start);
self.v = head;
Some(nth_back)
}
}
}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> ExactSizeIterator for ChunksExactMut<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for ChunksExactMut<'_, T> {}
#[stable(feature = "chunks_exact", since = "1.31.0")]
impl<T> FusedIterator for ChunksExactMut<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for ChunksExactMut<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for ChunksExactMut<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// A windowed iterator over a slice in overlapping chunks (`N` elements at a
/// time), starting at the beginning of the slice.
///
/// This struct is created by the [`array_windows`] method on [slices].
///
/// # Example
///
/// ```
/// #![feature(array_windows)]
///
/// let slice = [0, 1, 2, 3];
/// let iter = slice.array_windows::<2>();
/// ```
///
/// [`array_windows`]: slice::array_windows
/// [slices]: slice
#[derive(Debug, Clone, Copy)]
#[unstable(feature = "array_windows", issue = "75027")]
pub struct ArrayWindows<'a, T: 'a, const N: usize> {
slice_head: *const T,
num: usize,
marker: PhantomData<&'a [T; N]>,
}
impl<'a, T: 'a, const N: usize> ArrayWindows<'a, T, N> {
#[inline]
pub(super) fn new(slice: &'a [T]) -> Self {
let num_windows = slice.len().saturating_sub(N - 1);
Self { slice_head: slice.as_ptr(), num: num_windows, marker: PhantomData }
}
}
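// A quick illustration of `num_windows` above: a slice of length `L` has
// `L - N + 1` windows, saturating to zero when `L < N` (nightly-only, as
// the surrounding docs note via `#![feature(array_windows)]`):
//
//     let s = [0, 1, 2, 3];
//     assert_eq!(s.array_windows::<2>().count(), 3);
//     assert_eq!(s.array_windows::<5>().count(), 0);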
#[unstable(feature = "array_windows", issue = "75027")]
impl<'a, T, const N: usize> Iterator for ArrayWindows<'a, T, N> {
type Item = &'a [T; N];
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.num == 0 {
return None;
}
// SAFETY:
// This is safe because it's indexing into a slice guaranteed to contain at least `N` elements.
let ret = unsafe { &*self.slice_head.cast::<[T; N]>() };
// SAFETY: Guaranteed that there is at least one item remaining; otherwise
// the earlier branch would have been hit.
self.slice_head = unsafe { self.slice_head.add(1) };
self.num -= 1;
Some(ret)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.num, Some(self.num))
}
#[inline]
fn count(self) -> usize {
self.num
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
if self.num <= n {
self.num = 0;
return None;
}
// SAFETY:
        // This is safe because it's indexing into a slice that is guaranteed to hold at least `N` elements.
let ret = unsafe { &*self.slice_head.add(n).cast::<[T; N]>() };
        // SAFETY: Guaranteed that there are at least `n + 1` items remaining.
self.slice_head = unsafe { self.slice_head.add(n + 1) };
self.num -= n + 1;
Some(ret)
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.nth(self.num.checked_sub(1)?)
}
}
#[unstable(feature = "array_windows", issue = "75027")]
impl<'a, T, const N: usize> DoubleEndedIterator for ArrayWindows<'a, T, N> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T; N]> {
if self.num == 0 {
return None;
}
        // SAFETY: Guaranteed that `self.num > 0`, so the last window starts at index `self.num - 1`.
let ret = unsafe { &*self.slice_head.add(self.num - 1).cast::<[T; N]>() };
self.num -= 1;
Some(ret)
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<&'a [T; N]> {
if self.num <= n {
self.num = 0;
return None;
}
        // SAFETY: Guaranteed that `self.num > n`, so the target window starts at index `self.num - (n + 1)`.
let ret = unsafe { &*self.slice_head.add(self.num - (n + 1)).cast::<[T; N]>() };
self.num -= n + 1;
Some(ret)
}
}
#[unstable(feature = "array_windows", issue = "75027")]
impl<T, const N: usize> ExactSizeIterator for ArrayWindows<'_, T, N> {
fn is_empty(&self) -> bool {
self.num == 0
}
}
/// An iterator over a slice in (non-overlapping) chunks (`N` elements at a
/// time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `N-1` elements will be omitted, but they can be retrieved via
/// the [`remainder`] method on the iterator.
///
/// This struct is created by the [`array_chunks`] method on [slices].
///
/// # Example
///
/// ```
/// #![feature(array_chunks)]
///
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.array_chunks::<2>();
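///
/// // Only complete chunks are yielded: ['l', 'o'] and ['r', 'e'];
/// // the trailing 'm' is left in the remainder.
/// assert_eq!(iter.remainder(), &['m']);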
/// ```
///
/// [`array_chunks`]: slice::array_chunks
/// [`remainder`]: ArrayChunks::remainder
/// [slices]: slice
#[derive(Debug)]
#[unstable(feature = "array_chunks", issue = "74985")]
pub struct ArrayChunks<'a, T: 'a, const N: usize> {
iter: Iter<'a, [T; N]>,
rem: &'a [T],
}
impl<'a, T, const N: usize> ArrayChunks<'a, T, N> {
#[inline]
pub(super) fn new(slice: &'a [T]) -> Self {
let (array_slice, rem) = slice.as_chunks();
Self { iter: array_slice.iter(), rem }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `N-1`
/// elements.
#[unstable(feature = "array_chunks", issue = "74985")]
pub fn remainder(&self) -> &'a [T] {
self.rem
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[unstable(feature = "array_chunks", issue = "74985")]
impl<T, const N: usize> Clone for ArrayChunks<'_, T, N> {
fn clone(&self) -> Self {
ArrayChunks { iter: self.iter.clone(), rem: self.rem }
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<'a, T, const N: usize> Iterator for ArrayChunks<'a, T, N> {
type Item = &'a [T; N];
#[inline]
fn next(&mut self) -> Option<&'a [T; N]> {
self.iter.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
#[inline]
fn count(self) -> usize {
self.iter.count()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
#[inline]
fn last(self) -> Option<Self::Item> {
self.iter.last()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> &'a [T; N] {
// SAFETY: The safety guarantees of `__iterator_get_unchecked` are
// transferred to the caller.
unsafe { self.iter.__iterator_get_unchecked(i) }
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunks<'a, T, N> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T; N]> {
self.iter.next_back()
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth_back(n)
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<T, const N: usize> ExactSizeIterator for ArrayChunks<'_, T, N> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, const N: usize> TrustedLen for ArrayChunks<'_, T, N> {}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<T, const N: usize> FusedIterator for ArrayChunks<'_, T, N> {}
#[doc(hidden)]
#[unstable(feature = "array_chunks", issue = "74985")]
unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunks<'a, T, N> {}
#[doc(hidden)]
#[unstable(feature = "array_chunks", issue = "74985")]
unsafe impl<'a, T, const N: usize> TrustedRandomAccessNoCoerce for ArrayChunks<'a, T, N> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`N` elements
/// at a time), starting at the beginning of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last
/// up to `N-1` elements will be omitted, but they can be retrieved via
/// the [`into_remainder`] method on the iterator.
///
/// This struct is created by the [`array_chunks_mut`] method on [slices].
///
/// # Example
///
/// ```
/// #![feature(array_chunks)]
///
/// let mut slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.array_chunks_mut::<2>();
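///
/// // The trailing 'm' is not part of any chunk; it is only reachable
/// // through `into_remainder`.
/// assert_eq!(iter.into_remainder(), &mut ['m'][..]);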
/// ```
///
/// [`array_chunks_mut`]: slice::array_chunks_mut
/// [`into_remainder`]: ArrayChunksMut::into_remainder
/// [slices]: slice
#[derive(Debug)]
#[unstable(feature = "array_chunks", issue = "74985")]
pub struct ArrayChunksMut<'a, T: 'a, const N: usize> {
iter: IterMut<'a, [T; N]>,
rem: &'a mut [T],
}
impl<'a, T, const N: usize> ArrayChunksMut<'a, T, N> {
#[inline]
pub(super) fn new(slice: &'a mut [T]) -> Self {
let (array_slice, rem) = slice.as_chunks_mut();
Self { iter: array_slice.iter_mut(), rem }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `N-1`
/// elements.
#[unstable(feature = "array_chunks", issue = "74985")]
pub fn into_remainder(self) -> &'a mut [T] {
self.rem
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<'a, T, const N: usize> Iterator for ArrayChunksMut<'a, T, N> {
type Item = &'a mut [T; N];
#[inline]
fn next(&mut self) -> Option<&'a mut [T; N]> {
self.iter.next()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
#[inline]
fn count(self) -> usize {
self.iter.count()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
#[inline]
fn last(self) -> Option<Self::Item> {
self.iter.last()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> &'a mut [T; N] {
// SAFETY: The safety guarantees of `__iterator_get_unchecked` are transferred to
// the caller.
unsafe { self.iter.__iterator_get_unchecked(i) }
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<'a, T, const N: usize> DoubleEndedIterator for ArrayChunksMut<'a, T, N> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T; N]> {
self.iter.next_back()
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth_back(n)
}
}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<T, const N: usize> ExactSizeIterator for ArrayChunksMut<'_, T, N> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, const N: usize> TrustedLen for ArrayChunksMut<'_, T, N> {}
#[unstable(feature = "array_chunks", issue = "74985")]
impl<T, const N: usize> FusedIterator for ArrayChunksMut<'_, T, N> {}
#[doc(hidden)]
#[unstable(feature = "array_chunks", issue = "74985")]
unsafe impl<'a, T, const N: usize> TrustedRandomAccess for ArrayChunksMut<'a, T, N> {}
#[doc(hidden)]
#[unstable(feature = "array_chunks", issue = "74985")]
unsafe impl<'a, T, const N: usize> TrustedRandomAccessNoCoerce for ArrayChunksMut<'a, T, N> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`rchunks`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.rchunks(2);
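///
/// // Chunks are taken from the back: ['e', 'm'], ['o', 'r'], and finally
/// // the leftover ['l'] at the front.
/// assert_eq!(iter.count(), 3);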
/// ```
///
/// [`rchunks`]: slice::rchunks
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunks<'a, T: 'a> {
v: &'a [T],
chunk_size: usize,
}
impl<'a, T: 'a> RChunks<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T], size: usize) -> Self {
Self { v: slice, chunk_size: size }
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> Clone for RChunks<'_, T> {
fn clone(&self) -> Self {
RChunks { v: self.v, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunks<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let chunksz = cmp::min(self.v.len(), self.chunk_size);
let (fst, snd) = self.v.split_at(self.v.len() - chunksz);
self.v = fst;
Some(snd)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &[];
None
} else {
// Can't underflow because of the check above
let end = self.v.len() - end;
let start = match end.checked_sub(self.chunk_size) {
Some(sum) => sum,
None => 0,
};
let nth = &self.v[start..end];
self.v = &self.v[0..start];
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let rem = self.v.len() % self.chunk_size;
let end = if rem == 0 { self.chunk_size } else { rem };
Some(&self.v[0..end])
}
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let end = self.v.len() - idx * self.chunk_size;
let start = match end.checked_sub(self.chunk_size) {
None => 0,
Some(start) => start,
};
// SAFETY: mostly identical to `Chunks::__iterator_get_unchecked`.
unsafe { from_raw_parts(self.v.as_ptr().add(start), end - start) }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let chunksz = if remainder != 0 { remainder } else { self.chunk_size };
let (fst, snd) = self.v.split_at(chunksz);
self.v = snd;
Some(fst)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
// can't underflow because `n < len`
let offset_from_end = (len - 1 - n) * self.chunk_size;
let end = self.v.len() - offset_from_end;
let start = end.saturating_sub(self.chunk_size);
let nth_back = &self.v[start..end];
self.v = &self.v[end..];
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunks<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunks<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunks<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for RChunks<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for RChunks<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
///
/// This struct is created by the [`rchunks_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.rchunks_mut(2);
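///
/// // Same chunk boundaries as `rchunks`, but each chunk is mutable.
/// assert_eq!(iter.count(), 3);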
/// ```
///
/// [`rchunks_mut`]: slice::rchunks_mut
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksMut<'a, T: 'a> {
v: &'a mut [T],
chunk_size: usize,
}
impl<'a, T: 'a> RChunksMut<'a, T> {
#[inline]
pub(super) fn new(slice: &'a mut [T], size: usize) -> Self {
Self { v: slice, chunk_size: size }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let sz = cmp::min(self.v.len(), self.chunk_size);
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - sz);
self.v = head;
Some(tail)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.v.is_empty() {
(0, Some(0))
} else {
let n = self.v.len() / self.chunk_size;
let rem = self.v.len() % self.chunk_size;
let n = if rem > 0 { n + 1 } else { n };
(n, Some(n))
}
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
// Can't underflow because of the check above
let end = self.v.len() - end;
let start = match end.checked_sub(self.chunk_size) {
Some(sum) => sum,
None => 0,
};
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(start);
let (nth, _) = tail.split_at_mut(end - start);
self.v = head;
Some(nth)
}
}
#[inline]
fn last(self) -> Option<Self::Item> {
if self.v.is_empty() {
None
} else {
let rem = self.v.len() % self.chunk_size;
let end = if rem == 0 { self.chunk_size } else { rem };
Some(&mut self.v[0..end])
}
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let end = self.v.len() - idx * self.chunk_size;
let start = match end.checked_sub(self.chunk_size) {
None => 0,
Some(start) => start,
};
// SAFETY: see comments for `RChunks::__iterator_get_unchecked` and
// `ChunksMut::__iterator_get_unchecked`
unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), end - start) }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.is_empty() {
None
} else {
let remainder = self.v.len() % self.chunk_size;
let sz = if remainder != 0 { remainder } else { self.chunk_size };
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(sz);
self.v = tail;
Some(head)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
// can't underflow because `n < len`
let offset_from_end = (len - 1 - n) * self.chunk_size;
let end = self.v.len() - offset_from_end;
let start = end.saturating_sub(self.chunk_size);
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (_, nth_back) = tmp.split_at_mut(start);
self.v = tail;
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunksMut<'_, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksMut<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksMut<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksMut<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for RChunksMut<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the first
/// up to `chunk_size-1` elements will be omitted, but they can be retrieved via
/// the [`remainder`] method on the iterator.
///
/// This struct is created by the [`rchunks_exact`] method on [slices].
///
/// # Example
///
/// ```
/// let slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.rchunks_exact(2);
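///
/// // Only the two complete back chunks ['e', 'm'] and ['o', 'r'] are
/// // yielded; the front element 'l' is the remainder.
/// assert_eq!(iter.remainder(), &['l']);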
/// ```
///
/// [`rchunks_exact`]: slice::rchunks_exact
/// [`remainder`]: RChunksExact::remainder
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksExact<'a, T: 'a> {
v: &'a [T],
rem: &'a [T],
chunk_size: usize,
}
impl<'a, T> RChunksExact<'a, T> {
#[inline]
pub(super) fn new(slice: &'a [T], chunk_size: usize) -> Self {
let rem = slice.len() % chunk_size;
// SAFETY: 0 <= rem <= slice.len() by construction above
let (fst, snd) = unsafe { slice.split_at_unchecked(rem) };
Self { v: snd, rem: fst, chunk_size }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
pub fn remainder(&self) -> &'a [T] {
self.rem
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Clone for RChunksExact<'a, T> {
fn clone(&self) -> RChunksExact<'a, T> {
RChunksExact { v: self.v, rem: self.rem, chunk_size: self.chunk_size }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksExact<'a, T> {
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.v.len() - self.chunk_size);
self.v = fst;
Some(snd)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &[];
None
} else {
let (fst, _) = self.v.split_at(self.v.len() - end);
self.v = fst;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let end = self.v.len() - idx * self.chunk_size;
let start = end - self.chunk_size;
        // SAFETY: mostly identical to `Chunks::__iterator_get_unchecked`.
unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let (fst, snd) = self.v.split_at(self.chunk_size);
self.v = snd;
Some(fst)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &[];
None
} else {
// now that we know that `n` corresponds to a chunk,
// none of these operations can underflow/overflow
let offset = (len - n) * self.chunk_size;
let start = self.v.len() - offset;
let end = start + self.chunk_size;
let nth_back = &self.v[start..end];
self.v = &self.v[end..];
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> ExactSizeIterator for RChunksExact<'a, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksExact<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksExact<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksExact<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for RChunksExact<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) mutable chunks (`chunk_size`
/// elements at a time), starting at the end of the slice.
///
/// When the slice len is not evenly divided by the chunk size, the first up to
/// `chunk_size-1` elements will be omitted, but they can be retrieved via the
/// [`into_remainder`] method on the iterator.
///
/// This struct is created by the [`rchunks_exact_mut`] method on [slices].
///
/// # Example
///
/// ```
/// let mut slice = ['l', 'o', 'r', 'e', 'm'];
/// let iter = slice.rchunks_exact_mut(2);
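///
/// // As with `rchunks_exact`, the front element 'l' is never yielded;
/// // it is available via `into_remainder`.
/// assert_eq!(iter.into_remainder(), &mut ['l'][..]);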
/// ```
///
/// [`rchunks_exact_mut`]: slice::rchunks_exact_mut
/// [`into_remainder`]: RChunksExactMut::into_remainder
/// [slices]: slice
#[derive(Debug)]
#[stable(feature = "rchunks", since = "1.31.0")]
pub struct RChunksExactMut<'a, T: 'a> {
v: &'a mut [T],
rem: &'a mut [T],
chunk_size: usize,
}
impl<'a, T> RChunksExactMut<'a, T> {
#[inline]
pub(super) fn new(slice: &'a mut [T], chunk_size: usize) -> Self {
let rem = slice.len() % chunk_size;
// SAFETY: 0 <= rem <= slice.len() by construction above
let (fst, snd) = unsafe { slice.split_at_mut_unchecked(rem) };
Self { v: snd, rem: fst, chunk_size }
}
/// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
pub fn into_remainder(self) -> &'a mut [T] {
self.rem
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> Iterator for RChunksExactMut<'a, T> {
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (head, tail) = tmp.split_at_mut(tmp_len - self.chunk_size);
self.v = head;
Some(tail)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let n = self.v.len() / self.chunk_size;
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn nth(&mut self, n: usize) -> Option<&'a mut [T]> {
let (end, overflow) = n.overflowing_mul(self.chunk_size);
if end >= self.v.len() || overflow {
self.v = &mut [];
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let tmp_len = tmp.len();
let (fst, _) = tmp.split_at_mut(tmp_len - end);
self.v = fst;
self.next()
}
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
#[doc(hidden)]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
let end = self.v.len() - idx * self.chunk_size;
let start = end - self.chunk_size;
// SAFETY: see comments for `RChunksMut::__iterator_get_unchecked`.
unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) }
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.v.len() < self.chunk_size {
None
} else {
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(self.chunk_size);
self.v = tail;
Some(head)
}
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
let len = self.len();
if n >= len {
self.v = &mut [];
None
} else {
// now that we know that `n` corresponds to a chunk,
// none of these operations can underflow/overflow
let offset = (len - n) * self.chunk_size;
let start = self.v.len() - offset;
let end = start + self.chunk_size;
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
let (_, nth_back) = tmp.split_at_mut(start);
self.v = tail;
Some(nth_back)
}
}
}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> ExactSizeIterator for RChunksExactMut<'_, T> {
fn is_empty(&self) -> bool {
self.v.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for RChunksExactMut<'_, T> {}
#[stable(feature = "rchunks", since = "1.31.0")]
impl<T> FusedIterator for RChunksExactMut<'_, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for RChunksExactMut<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for RChunksExactMut<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for Iter<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<'a, T> TrustedRandomAccessNoCoerce for IterMut<'a, T> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks separated by a predicate.
///
/// This struct is created by the [`group_by`] method on [slices].
///
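/// # Example
///
/// ```
/// #![feature(slice_group_by)]
///
/// let slice = &[1, 1, 2, 2, 2, 3];
/// let mut iter = slice.group_by(|a, b| a == b);
/// assert_eq!(iter.next(), Some(&[1, 1][..]));
/// assert_eq!(iter.next(), Some(&[2, 2, 2][..]));
/// assert_eq!(iter.next(), Some(&[3][..]));
/// assert_eq!(iter.next(), None);
/// ```
///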
/// [`group_by`]: slice::group_by
/// [slices]: slice
#[unstable(feature = "slice_group_by", issue = "80552")]
pub struct GroupBy<'a, T: 'a, P> {
slice: &'a [T],
predicate: P,
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> GroupBy<'a, T, P> {
pub(super) fn new(slice: &'a [T], predicate: P) -> Self {
GroupBy { slice, predicate }
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> Iterator for GroupBy<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.slice.is_empty() {
None
} else {
let mut len = 1;
let mut iter = self.slice.windows(2);
while let Some([l, r]) = iter.next() {
if (self.predicate)(l, r) { len += 1 } else { break }
}
let (head, tail) = self.slice.split_at(len);
self.slice = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.slice.is_empty() { (0, Some(0)) } else { (1, Some(self.slice.len())) }
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> DoubleEndedIterator for GroupBy<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
if self.slice.is_empty() {
None
} else {
let mut len = 1;
let mut iter = self.slice.windows(2);
while let Some([l, r]) = iter.next_back() {
if (self.predicate)(l, r) { len += 1 } else { break }
}
let (head, tail) = self.slice.split_at(self.slice.len() - len);
self.slice = head;
Some(tail)
}
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> FusedIterator for GroupBy<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for GroupBy<'a, T, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GroupBy").field("slice", &self.slice).finish()
}
}
/// An iterator over a slice in (non-overlapping) mutable chunks separated
/// by a predicate.
///
/// This struct is created by the [`group_by_mut`] method on [slices].
///
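/// # Example
///
/// ```
/// #![feature(slice_group_by)]
///
/// let mut slice = [1, 1, 2, 2, 2, 3];
/// let mut iter = slice.group_by_mut(|a, b| a == b);
/// assert_eq!(iter.next(), Some(&mut [1, 1][..]));
/// assert_eq!(iter.next(), Some(&mut [2, 2, 2][..]));
/// assert_eq!(iter.next(), Some(&mut [3][..]));
/// assert_eq!(iter.next(), None);
/// ```
///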
/// [`group_by_mut`]: slice::group_by_mut
/// [slices]: slice
#[unstable(feature = "slice_group_by", issue = "80552")]
pub struct GroupByMut<'a, T: 'a, P> {
slice: &'a mut [T],
predicate: P,
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> GroupByMut<'a, T, P> {
pub(super) fn new(slice: &'a mut [T], predicate: P) -> Self {
GroupByMut { slice, predicate }
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> Iterator for GroupByMut<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.slice.is_empty() {
None
} else {
let mut len = 1;
let mut iter = self.slice.windows(2);
while let Some([l, r]) = iter.next() {
if (self.predicate)(l, r) { len += 1 } else { break }
}
let slice = mem::take(&mut self.slice);
let (head, tail) = slice.split_at_mut(len);
self.slice = tail;
Some(head)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.slice.is_empty() { (0, Some(0)) } else { (1, Some(self.slice.len())) }
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> DoubleEndedIterator for GroupByMut<'a, T, P>
where
P: FnMut(&T, &T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
if self.slice.is_empty() {
None
} else {
let mut len = 1;
let mut iter = self.slice.windows(2);
while let Some([l, r]) = iter.next_back() {
if (self.predicate)(l, r) { len += 1 } else { break }
}
let slice = mem::take(&mut self.slice);
let (head, tail) = slice.split_at_mut(slice.len() - len);
self.slice = head;
Some(tail)
}
}
}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a, P> FusedIterator for GroupByMut<'a, T, P> where P: FnMut(&T, &T) -> bool {}
#[unstable(feature = "slice_group_by", issue = "80552")]
impl<'a, T: 'a + fmt::Debug, P> fmt::Debug for GroupByMut<'a, T, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GroupByMut").field("slice", &self.slice).finish()
}
}<|fim▁end|> | impl<'a, T> Iterator for ChunksExact<'a, T> {
type Item = &'a [T];
#[inline] |
<|file_name|>Expedia.py<|end_file_name|><|fim▁begin|>import re
import urllib.request
import logging
import math
from bs4 import BeautifulSoup
from Vacation import VacationPackage, Departures, Destinations, Duration
ALL_PAGES = True
DEPARTURE_MAPPING = {
Departures.OTTAWA: "YOW"
}
DEPARTURE_CITY_MAPPING = {
Departures.OTTAWA: "Ottawa"
}
DESTINATION_MAPPING = {
Destinations.MAYAN_RIVIERA: "24%7C3%2C4%2C5%2C7%2C8%2C9%2C10%2C11%2C12%2C14%7C3%2C6%2C130%2C163%2C216%2C273%2C275%2C411%2C422%2C483%2C577%2C603%2C666%2C766%2C775%2C804%2C822%2C836%2C871%2C872%2C873%2C896%2C897%2C908%2C940%2C942%2C974%2C980%2C1001%2C1003%2C1004%2C1006%2C1104%2C1189%2C1350%2C1351%2C1352%2C1373%2C1566%2C1595%2C1607%2C1616%2C1692%2C1695%2C1703%2C1705%2C1708%2C1716%2C1840%2C1900%2C1928%2C2064%2C2082%2C2093%2C2098%2C2114%2C2118%2C2120%2C2172%2C2185%2C2371%2C2565%2C2718%2C2719%2C2739%2C2823%2C3012%2C3062%2C3088%2C3105%2C7700%2C7742%2C8721%2C8791%2C9267%2C9343%2C9422%2C9557%2C9558%2C9575%2C9576%2C10196%2C10314%2C10368%2C10453%2C10647%2C10652%2C10659%2C10663%2C10698%2C10837%2C10849%2C10895%2C10904%2C10971%2C11040%2C11063%2C11116%2C11174%2C11202",
Destinations.ARUBA: "29%7C7%2C14%7C177%2C179%2C958%2C1026%2C1027%2C1028%2C1124%2C1364%2C1564%2C1680%2C1681%2C1733%2C2054%2C10351%2C10650"
}
DESTINATION_CITY_MAPPING = {
Destinations.MAYAN_RIVIERA: "Riviera+Maya",
Destinations.ARUBA: "Aruba"
}
DESTINATION_COUNTRY_MAPPING = {
Destinations.MAYAN_RIVIERA: "Mexico",
Destinations.ARUBA: "Aruba"
}
DURATION_MAPPING = {
Duration.DAYS_7: "7DAYS",
Duration.DAYS_10: "10DAYS"
}
class ExpediaScraper:
def fetch_vacation_packages(self, vacation_request):
expedia_vacation_request = ExpediaVacation_request(vacation_request)
return ExpediaVacationScraper(expedia_vacation_request).fetch_vacation_packages()
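# Illustrative usage (assumes `vacation_request` is a populated request object
# exposing departure_city, destination, date, duration and adults):
#   packages = ExpediaScraper().fetch_vacation_packages(vacation_request)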
<|fim▁hole|> def __init__(self, vacation_request):
self.vacation_request = vacation_request
self.from_code = DEPARTURE_MAPPING[vacation_request.departure_city]
self.origin_city = DEPARTURE_CITY_MAPPING[vacation_request.departure_city]
self.to = DESTINATION_MAPPING[vacation_request.destination]
self.to_city = DESTINATION_CITY_MAPPING[vacation_request.destination]
self.to_country = DESTINATION_COUNTRY_MAPPING[vacation_request.destination]
self.date = vacation_request.date
self.duration = DURATION_MAPPING[vacation_request.duration]
self.occupancy = "D"
self.adults = str(vacation_request.adults)
class ExpediaVacationScraper:
def __init__(self, expedia_vacation_request):
self.expedia_vacation_request = expedia_vacation_request
self.from_code = expedia_vacation_request.from_code
self.origin_city = expedia_vacation_request.origin_city
self.to = expedia_vacation_request.to
self.to_city = expedia_vacation_request.to_city
self.to_country = expedia_vacation_request.to_country
self.date = expedia_vacation_request.date
self.duration = expedia_vacation_request.duration
self.occupancy = expedia_vacation_request.occupancy
self.adults = expedia_vacation_request.adults
self.original_duration = expedia_vacation_request.vacation_request.duration
def fetch_vacation_packages(self):
results = []
page = 0
fetch_page = True
while fetch_page:
fetch_page = False
page += 1
url = "https://www.expedia.ca/all-inclusive-search?origin=" + self.from_code + "&destination=" + self.to + "&fromDate=" + self.date + "&duration=" + self.duration + "&pagingFlag=Y&pageIndex=" + str(
page) + "&occupancy=" + self.occupancy + "&originCityName=" + self.origin_city + "&destinationCityName=" + self.to_city + "&country=" + self.to_country + "&sortBy=&langid=4105&numAdults=" + self.adults + "&numChildren=0&numRooms=1"
logging.debug("Fetching URL " + url)
f = urllib.request.urlopen(url)
html = f.read()
logging.info("Done, parsing results")
soup = BeautifulSoup(html, "html.parser")
for tag in soup.find_all("div", class_="flex-card"):
# all stop information
primaryBlock = tag.find_all('div', class_='flex-area-primary')
name = primaryBlock[0].find_all('h5')[0].find_all('a')[0].get_text().strip()
primary_items = primaryBlock[0].find_all('div', class_='secondary')
city = primary_items[0].get_text().strip()
descr = primary_items[1].get_text().strip()
dates = primary_items[2].get_text().strip()
oper = primary_items[3].find(text=True).strip()
match = re.search('Operated by (.*),', oper)
oper = match.group(1).strip()
match = re.search('Depart:(.*)Return:(.*)', dates)
depart = match.group(1).strip()
retr = match.group(2).strip()
secondaryBlock = tag.find_all('div', class_='flex-area-secondary')
secondaryItems = secondaryBlock[0].find_all('div', class_='h1')
children = secondaryItems[0].findChildren()
if (len(children) > 1):
cost = children[1].get_text().strip()
else:
cost = children[0].get_text().strip()
finalCost = int(cost.replace(',', '').replace('C$', ''))
package = VacationPackage(name, oper, city, depart, retr, self.original_duration.name, finalCost)
results.append(package)
nav = soup.find_all("nav", class_="pagination")
if len(nav) > 0:
nav = nav[0]
data_per_page = int(nav.attrs['data-per-page'])
total_data = int(nav.attrs['data-total-results'])
number_of_pages = math.ceil(total_data / data_per_page)
if page < number_of_pages and ALL_PAGES:
logging.info("Completed " + str(page) + "/" + str(number_of_pages) + " pages from " + str(
total_data) + " results")
logging.info("Reading next page")
fetch_page = True
logging.info("Parsing complete")
return results<|fim▁end|> |
class ExpediaVacation_request: |
<|file_name|>0007_comments_comment_author.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('SocialNetworkModels', '0006_remove_comments_post_author'),
]
operations = [<|fim▁hole|> model_name='comments',
name='comment_author',
field=models.CharField(default='aaa', max_length=200),
preserve_default=False,
),
]<|fim▁end|> | migrations.AddField( |
<|file_name|>subscription_tracking.py<|end_file_name|><|fim▁begin|>class SubscriptionTracking(object):
def __init__(self, enable=None, text=None, html=None, substitution_tag=None):
self._enable = None
self._text = None
self._html = None
self._substitution_tag = None
if enable is not None:
self.enable = enable
if text is not None:
self.text = text
if html is not None:
self.html = html
if substitution_tag is not None:
self.substitution_tag = substitution_tag
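    # Illustrative usage (hypothetical values): the properties below mirror the
    # constructor arguments, and get() emits only the fields that were set:
    #   tracking = SubscriptionTracking(enable=True, text="Unsubscribe")
    #   tracking.get()  # -> {"enable": True, "text": "Unsubscribe"}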
@property
def enable(self):
return self._enable
@enable.setter
def enable(self, value):
self._enable = value
@property
def text(self):
return self._text
@text.setter
def text(self, value):
self._text = value
@property
def html(self):
return self._html
@html.setter
def html(self, value):
self._html = value
@property
def substitution_tag(self):
return self._substitution_tag
@substitution_tag.setter
def substitution_tag(self, value):
self._substitution_tag = value
def get(self):
subscription_tracking = {}
if self.enable is not None:
subscription_tracking["enable"] = self.enable<|fim▁hole|> if self.html is not None:
subscription_tracking["html"] = self.html
if self.substitution_tag is not None:
subscription_tracking["substitution_tag"] = self.substitution_tag
return subscription_tracking<|fim▁end|> |
if self.text is not None:
subscription_tracking["text"] = self.text
|
<|file_name|>asteroids.js<|end_file_name|><|fim▁begin|>//asteroid clone (core mechanics only)
//arrow keys to move + x to shoot
var bullets;
var asteroids;
var ship;
var shipImage, bulletImage, particleImage;
var MARGIN = 40;
function setup() {
createCanvas(800, 600);
bulletImage = loadImage('assets/asteroids_bullet.png');
shipImage = loadImage('assets/asteroids_ship0001.png');
particleImage = loadImage('assets/asteroids_particle.png');
ship = createSprite(width/2, height/2);
ship.maxSpeed = 6;
ship.friction = 0.98;
ship.setCollider('circle', 0, 0, 20);
ship.addImage('normal', shipImage);
ship.addAnimation('thrust', 'assets/asteroids_ship0002.png', 'assets/asteroids_ship0007.png');
asteroids = new Group();
bullets = new Group();
for(var i = 0; i<8; i++) {
var ang = random(360);
var px = width/2 + 1000 * cos(radians(ang));
var py = height/2+ 1000 * sin(radians(ang));
createAsteroid(3, px, py);
}
}
function draw() {
background(0);
fill(255);
textAlign(CENTER);
text('Controls: Arrow Keys + X', width/2, 20);
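  // Screen wrap: once any sprite drifts MARGIN pixels past an edge, it is
  // teleported to the opposite side so the ship, bullets and asteroids stay
  // in play.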
for(var i=0; i<allSprites.length; i++) {
var s = allSprites[i];
if(s.position.x<-MARGIN) s.position.x = width+MARGIN;
if(s.position.x>width+MARGIN) s.position.x = -MARGIN;
if(s.position.y<-MARGIN) s.position.y = height+MARGIN;
if(s.position.y>height+MARGIN) s.position.y = -MARGIN;
}
asteroids.overlap(bullets, asteroidHit);
ship.bounce(asteroids);
if(keyDown(LEFT_ARROW))
ship.rotation -= 4;
if(keyDown(RIGHT_ARROW))
ship.rotation += 4;
if(keyDown(UP_ARROW))
{
ship.addSpeed(0.2, ship.rotation);
ship.changeAnimation('thrust');
}
else
ship.changeAnimation('normal');
if(keyWentDown('x'))
{
var bullet = createSprite(ship.position.x, ship.position.y);
bullet.addImage(bulletImage);
bullet.setSpeed(10+ship.getSpeed(), ship.rotation);
bullet.life = 30;
bullets.add(bullet);
}
<|fim▁hole|>
function createAsteroid(type, x, y) {
var a = createSprite(x, y);
var img = loadImage('assets/asteroid'+floor(random(0, 3))+'.png');
a.addImage(img);
a.setSpeed(2.5-(type/2), random(360));
a.rotationSpeed = 0.5;
//a.debug = true;
a.type = type;
if(type == 2)
a.scale = 0.6;
if(type == 1)
a.scale = 0.3;
a.mass = 2+a.scale;
a.setCollider('circle', 0, 0, 50);
asteroids.add(a);
return a;
}
function asteroidHit(asteroid, bullet) {
var newType = asteroid.type-1;
if(newType>0) {
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
createAsteroid(newType, asteroid.position.x, asteroid.position.y);
}
for(var i=0; i<10; i++) {
var p = createSprite(bullet.position.x, bullet.position.y);
p.addImage(particleImage);
p.setSpeed(random(3, 5), random(360));
p.friction = 0.95;
p.life = 15;
}
bullet.remove();
asteroid.remove();
}<|fim▁end|> | drawSprites();
} |
<|file_name|>shell_utils.py<|end_file_name|><|fim▁begin|>"""Set of utility functions for working with OS commands.
Functions in this module return the command string. These commands are composed but not executed.
"""
import os
from subprocess import call
HADOOP_CONF_DIR = '/etc/hadoop/conf'
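# These helpers only build command strings; callers compose and execute them.
# Illustrative pipeline (hypothetical paths):
#   pipeline = " | ".join([hdfs_cat("/data/logs.tar.gz"), decompress("gz"), untar("/tmp/out")])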
def encrypt(key_file):
"""
Encrypt the data from stdin and write output to stdout.
:param key_file: The key file used to encrypt the stream.
"""
if not os.path.isfile(key_file):
raise ValueError("Cannot find key_file: %" % key_file)
return "openssl aes-256-cbc -salt -pass file:%s" % key_file
def decrypt(key_file):
"""
Decrypt the data from stdin and write output to stdout.
:param key_file: The key file used to decrypt the stream.
"""
if not os.path.isfile(key_file):
        raise ValueError("Cannot find key_file: %s" % key_file)
return "openssl aes-256-cbc -d -pass file:%s" % key_file
def compress(extension):
"""
Compress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
    'gz' for gzip, 'bz' or 'bz2' for bzip, 'lzo' for lzop.
"""
if extension == "gz":
cmd = "pigz" if exists("pigz") else "gzip"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2"
elif extension == 'lzo':
cmd = "lzop"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
<|fim▁hole|> """
Decompress the data from stdin and write output to stdout.
:param extension: The compression format identified by the file extension. Allowed values are:
    'gz' for gzip, 'bz' or 'bz2' for bzip, 'lzo' for lzop.
"""
if extension == "gz":
cmd = "pigz -d" if exists("pigz") else "gzip -d"
elif extension == "bz" or extension == "bz2":
cmd = "bzip2 -d"
elif extension == 'lzo':
cmd = "lzop -d"
else:
raise ValueError("Unknown compression format/file extension")
return cmd
def hdfs_cat(uri, conf=HADOOP_CONF_DIR):
"""
Fetch the data from the specified uri and write output to stdout.
:param uri: The HDFS URI.
:param conf: The hadoop config directory.
"""
return "hadoop --config %s dfs -cat %s" % (conf, uri)
def pv(size):
"""
Monitor the progress of data through a pipe. If 'pv' is not available, simply 'cat' it.
:param size: The size of the data, to calculate percentage.
"""
if exists('pv'):
return "pv --wait --size %s" % size
else:
return "cat"
def untar(directory):
"""
Untar the data from stdin into the specified directory.
:param directory: The directory to write files to.
"""
return "tar -C %s -x" % directory
def tar(path):
"""
Tar the path and write output to stdout.
:param path: All contents under path are 'tar'ed.
"""
if not os.path.exists(path):
raise ValueError("Invalid argument: 'path' doesn't exist")
path = path.rstrip(os.sep)
parent, base = os.path.split(path)
return "tar -C %s %s" % (parent, base)
def exists(cmd):
"""Return true if 'cmd' exists in $PATH."""
with open(os.devnull, "w") as f:
return call(['which', cmd], stdout=f) == 0 # No stdout.<|fim▁end|> |
def decompress(extension): |
<|file_name|>frontmatter.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use cobalt_config::DateTime;
use cobalt_config::SourceFormat;
use liquid;
use serde::Serialize;
use super::pagination;
use crate::error::Result;
#[derive(Debug, Eq, PartialEq, Default, Clone, Serialize)]
#[serde(deny_unknown_fields, default)]
pub struct Frontmatter {
pub permalink: cobalt_config::Permalink,
pub slug: kstring::KString,
pub title: kstring::KString,
pub description: Option<kstring::KString>,
pub excerpt: Option<kstring::KString>,
pub categories: Vec<kstring::KString>,
pub tags: Option<Vec<kstring::KString>>,
pub excerpt_separator: kstring::KString,
pub published_date: Option<DateTime>,
pub format: SourceFormat,
pub templated: bool,
pub layout: Option<kstring::KString>,
pub is_draft: bool,
pub weight: i32,
pub collection: kstring::KString,
pub data: liquid::Object,
pub pagination: Option<pagination::PaginationConfig>,
}
impl Frontmatter {
pub fn from_config(config: cobalt_config::Frontmatter) -> Result<Frontmatter> {
let cobalt_config::Frontmatter {
permalink,
slug,
title,
description,
excerpt,
categories,
tags,
excerpt_separator,
published_date,
format,
templated,
layout,
is_draft,
weight,
collection,
data,
pagination,
} = config;<|fim▁hole|>
if let Some(ref tags) = tags {
if tags.iter().any(|x| x.trim().is_empty()) {
failure::bail!("Empty strings are not allowed in tags");
}
}
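        // Normalize: an empty tag list is treated the same as no tags at all.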
let tags = if tags.as_ref().map(|t| t.len()).unwrap_or(0) == 0 {
None
} else {
tags
};
let fm = Frontmatter {
pagination: pagination
.and_then(|p| pagination::PaginationConfig::from_config(p, &permalink)),
permalink,
slug: slug.ok_or_else(|| failure::err_msg("No slug"))?,
title: title.ok_or_else(|| failure::err_msg("No title"))?,
description,
excerpt,
categories: categories.unwrap_or_else(Vec::new),
tags,
excerpt_separator: excerpt_separator.unwrap_or_else(|| "\n\n".into()),
published_date,
format: format.unwrap_or_default(),
#[cfg(feature = "preview_unstable")]
templated: templated.unwrap_or(false),
#[cfg(not(feature = "preview_unstable"))]
templated: templated.unwrap_or(true),
layout,
is_draft: is_draft.unwrap_or(false),
weight: weight.unwrap_or(0),
collection,
data,
};
if let Some(pagination) = &fm.pagination {
if !pagination::is_date_index_sorted(&pagination.date_index) {
failure::bail!("date_index is not correctly sorted: Year > Month > Day...");
}
}
Ok(fm)
}
}
impl fmt::Display for Frontmatter {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let converted = serde_yaml::to_string(self).expect("should always be valid");
let subset = converted
.strip_prefix("---")
.unwrap_or_else(|| converted.as_str())
.trim();
let converted = if subset == "{}" { "" } else { subset };
if converted.is_empty() {
Ok(())
} else {
write!(f, "{}", converted)
}
}
}<|fim▁end|> |
let collection = collection.unwrap_or_default();
let permalink = permalink.unwrap_or_default(); |
<|file_name|>test_openid_provider.py<|end_file_name|><|fim▁begin|>#-*- encoding=utf-8 -*-
'''
Created on Jan 18, 2013
@author: brian
'''
import openid
from openid.fetchers import HTTPFetcher, HTTPResponse
from urlparse import parse_qs, urlparse
from django.conf import settings
from django.test import TestCase, LiveServerTestCase
from django.core.cache import cache
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from unittest import skipUnless
from student.tests.factories import UserFactory
from openedx.core.djangoapps.external_auth.views import provider_login
class MyFetcher(HTTPFetcher):
"""A fetcher that uses server-internal calls for performing HTTP
requests.
"""
def __init__(self, client):
"""@param client: A test client object"""
super(MyFetcher, self).__init__()
self.client = client
def fetch(self, url, body=None, headers=None):
"""Perform an HTTP request
@raises Exception: Any exception that can be raised by Django
@see: C{L{HTTPFetcher.fetch}}
"""
if body:
# method = 'POST'
# undo the URL encoding of the POST arguments
data = parse_qs(body)
response = self.client.post(url, data)
else:
# method = 'GET'
data = {}
if headers and 'Accept' in headers:
data['CONTENT_TYPE'] = headers['Accept']
response = self.client.get(url, data)
# Translate the test client response to the fetcher's HTTP response abstraction
content = response.content
final_url = url
response_headers = {}
if 'Content-Type' in response:
response_headers['content-type'] = response['Content-Type']
if 'X-XRDS-Location' in response:
response_headers['x-xrds-location'] = response['X-XRDS-Location']
status = response.status_code
return HTTPResponse(
body=content,
final_url=final_url,
headers=response_headers,
status=status,
)
class OpenIdProviderTest(TestCase):
"""
Tests of the OpenId login
"""
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_begin_login_with_xrds_url(self):
# the provider URL must be converted to an absolute URL in order to be
# used as an openid provider.
provider_url = reverse('openid-provider-xrds')
factory = RequestFactory()
request = factory.request()
abs_provider_url = request.build_absolute_uri(location=provider_url)
# In order for this absolute URL to work (i.e. to get xrds, then authentication)
# in the test environment, we either need a live server that works with the default
# fetcher (i.e. urlopen2), or a test server that is reached through a custom fetcher.
# Here we do the latter:
fetcher = MyFetcher(self.client)
openid.fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)
<|fim▁hole|>
url = reverse('openid-login')
resp = self.client.post(url)
code = 200
self.assertEqual(resp.status_code, code,
"got code {0} for url '{1}'. Expected code {2}"
.format(resp.status_code, url, code))
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_begin_login_with_login_url(self):
# the provider URL must be converted to an absolute URL in order to be
# used as an openid provider.
provider_url = reverse('openid-provider-login')
factory = RequestFactory()
request = factory.request()
abs_provider_url = request.build_absolute_uri(location=provider_url)
# In order for this absolute URL to work (i.e. to get xrds, then authentication)
# in the test environment, we either need a live server that works with the default
# fetcher (i.e. urlopen2), or a test server that is reached through a custom fetcher.
# Here we do the latter:
fetcher = MyFetcher(self.client)
openid.fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)
# now we can begin the login process by invoking a local openid client,
# with a pointer to the (also-local) openid provider:
with self.settings(OPENID_SSO_SERVER_URL=abs_provider_url):
url = reverse('openid-login')
resp = self.client.post(url)
code = 200
self.assertEqual(resp.status_code, code,
"got code {0} for url '{1}'. Expected code {2}"
.format(resp.status_code, url, code))
for expected_input in (
'<input name="openid.ns" type="hidden" value="http://specs.openid.net/auth/2.0" />',
'<input name="openid.ns.ax" type="hidden" value="http://openid.net/srv/ax/1.0" />',
'<input name="openid.ax.type.fullname" type="hidden" value="http://axschema.org/namePerson" />',
'<input type="submit" value="Continue" />',
'<input name="openid.ax.type.email" type="hidden" value="http://axschema.org/contact/email" />',
'<input name="openid.ax.type.lastname" '
'type="hidden" value="http://axschema.org/namePerson/last" />',
'<input name="openid.ax.type.firstname" '
'type="hidden" value="http://axschema.org/namePerson/first" />',
'<input name="openid.ax.required" type="hidden" '
'value="email,fullname,old_email,firstname,old_nickname,lastname,old_fullname,nickname" />',
'<input name="openid.ax.type.nickname" '
'type="hidden" value="http://axschema.org/namePerson/friendly" />',
'<input name="openid.ax.type.old_email" '
'type="hidden" value="http://schema.openid.net/contact/email" />',
'<input name="openid.ax.type.old_nickname" '
'type="hidden" value="http://schema.openid.net/namePerson/friendly" />',
'<input name="openid.ax.type.old_fullname" '
'type="hidden" value="http://schema.openid.net/namePerson" />',
'<input name="openid.identity" '
'type="hidden" value="http://specs.openid.net/auth/2.0/identifier_select" />',
'<input name="openid.claimed_id" '
'type="hidden" value="http://specs.openid.net/auth/2.0/identifier_select" />',
# should work on the test server as well
'<input name="openid.realm" '
'type="hidden" value="http://testserver/" />',
):
self.assertContains(resp, expected_input, html=True)
# not included here are elements that will vary from run to run:
# <input name="openid.return_to" type="hidden"
# value="http://testserver/openid/complete/?janrain_nonce=2013-01-23T06%3A20%3A17ZaN7j6H" />
# <input name="openid.assoc_handle" type="hidden" value="{HMAC-SHA1}{50ff8120}{rh87+Q==}" />
def attempt_login(self, expected_code, login_method='POST', **kwargs):
""" Attempt to log in through the open id provider login """
url = reverse('openid-provider-login')
args = {
"openid.mode": "checkid_setup",
"openid.return_to": "http://testserver/openid/complete/?janrain_nonce=2013-01-23T06%3A20%3A17ZaN7j6H",
"openid.assoc_handle": "{HMAC-SHA1}{50ff8120}{rh87+Q==}",
"openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select",
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.realm": "http://testserver/",
"openid.identity": "http://specs.openid.net/auth/2.0/identifier_select",
"openid.ns.ax": "http://openid.net/srv/ax/1.0",
"openid.ax.mode": "fetch_request",
"openid.ax.required": "email,fullname,old_email,firstname,old_nickname,lastname,old_fullname,nickname",
"openid.ax.type.fullname": "http://axschema.org/namePerson",
"openid.ax.type.lastname": "http://axschema.org/namePerson/last",
"openid.ax.type.firstname": "http://axschema.org/namePerson/first",
"openid.ax.type.nickname": "http://axschema.org/namePerson/friendly",
"openid.ax.type.email": "http://axschema.org/contact/email",
"openid.ax.type.old_email": "http://schema.openid.net/contact/email",
"openid.ax.type.old_nickname": "http://schema.openid.net/namePerson/friendly",
"openid.ax.type.old_fullname": "http://schema.openid.net/namePerson",
}
# override the default args with any given arguments
for key in kwargs:
args["openid." + key] = kwargs[key]
if login_method == 'POST':
resp = self.client.post(url, args)
elif login_method == 'GET':
resp = self.client.get(url, args)
else:
self.fail('Invalid login method')
code = expected_code
self.assertEqual(resp.status_code, code,
"got code {0} for url '{1}'. Expected code {2}"
.format(resp.status_code, url, code))
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_open_id_setup(self):
""" Attempt a standard successful login """
self.attempt_login(200)
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_invalid_namespace(self):
""" Test for 403 error code when the namespace of the request is invalid"""
self.attempt_login(403, ns="http%3A%2F%2Fspecs.openid.net%2Fauth%2F2.0")
@override_settings(OPENID_PROVIDER_TRUSTED_ROOTS=['http://apps.cs50.edx.org'])
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_invalid_return_url(self):
""" Test for 403 error code when the url"""
self.attempt_login(403, return_to="http://apps.cs50.edx.or")
def _send_bad_redirection_login(self):
"""
Attempt to log in to the provider with setup parameters
Intentionally fail the login to force a redirect
"""
user = UserFactory()
factory = RequestFactory()
post_params = {'email': user.email, 'password': 'password'}
fake_url = 'fake url'
request = factory.post(reverse('openid-provider-login'), post_params)
openid_setup = {
'request': factory.request(),
'url': fake_url,
'post_params': {}
}
request.session = {
'openid_setup': openid_setup
}
response = provider_login(request)
return response
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_login_openid_handle_redirection(self):
""" Test to see that we can handle login redirection properly"""
response = self._send_bad_redirection_login()
self.assertEquals(response.status_code, 302)
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_login_openid_handle_redirection_ratelimited(self):
# try logging in 30 times, the default limit in the number of failed
# log in attempts before the rate gets limited
for _ in xrange(30):
self._send_bad_redirection_login()
response = self._send_bad_redirection_login()
# verify that we are not returning the default 403
self.assertEquals(response.status_code, 302)
# clear the ratelimit cache so that we don't fail other logins
cache.clear()
def _attempt_login_and_perform_final_response(self, user, profile_name):
"""
Performs full procedure of a successful OpenID provider login for user,
all required data is taken form ``user`` attribute which is an instance
of ``User`` model. As a convenience this method will also set
``profile.name`` for the user.
"""
url = reverse('openid-provider-login')
# login to the client so that we can persist session information
user.profile.name = profile_name
user.profile.save()
        # It is assumed that the user's password is 'test' (the default for UserFactory).
self.client.login(username=user.username, password='test')
# login once to get the right session information
self.attempt_login(200)
post_args = {
'email': user.email,
'password': 'test'
}
# call url again, this time with username and password
return self.client.post(url, post_args)
@skipUnless(
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'), 'OpenID not enabled')
def test_provider_login_can_handle_unicode_email(self):
user = UserFactory(email=u"user.ąęł@gmail.com")
resp = self._attempt_login_and_perform_final_response(user, u"Jan ĄĘŁ")
location = resp['Location']
parsed_url = urlparse(location)
parsed_qs = parse_qs(parsed_url.query)
self.assertEquals(parsed_qs['openid.ax.type.ext1'][0], 'http://axschema.org/contact/email')
self.assertEquals(parsed_qs['openid.ax.type.ext0'][0], 'http://axschema.org/namePerson')
self.assertEquals(parsed_qs['openid.ax.value.ext0.1'][0],
user.profile.name.encode('utf-8')) # pylint: disable=no-member
self.assertEquals(parsed_qs['openid.ax.value.ext1.1'][0],
user.email.encode('utf-8')) # pylint: disable=no-member
@skipUnless(
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'), 'OpenID not enabled')
def test_provider_login_can_handle_unicode_email_invalid_password(self):
user = UserFactory(email=u"user.ąęł@gmail.com")
url = reverse('openid-provider-login')
# login to the client so that we can persist session information
user.profile.name = u"Jan ĄĘ"
user.profile.save()
        # It is assumed that the user's password is 'test' (default for UserFactory)
self.client.login(username=user.username, password='test')
# login once to get the right session information
self.attempt_login(200)
# We trigger situation where user password is invalid at last phase
# of openid login
post_args = {
'email': user.email,
'password': 'invalid-password'
}
# call url again, this time with username and password
return self.client.post(url, post_args)
@skipUnless(
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'), 'OpenID not enabled')
def test_provider_login_can_handle_unicode_email_inactive_account(self):
user = UserFactory(email=u"user.ąęł@gmail.com")
url = reverse('openid-provider-login')
# login to the client so that we can persist session information
user.profile.name = u'Jan ĄĘ'
user.profile.save() # pylint: disable=no-member
self.client.login(username=user.username, password='test')
# login once to get the right session information
self.attempt_login(200)
# We trigger situation where user is not active at final phase of
# OpenId login.
user.is_active = False
user.save() # pylint: disable=no-member
post_args = {
'email': user.email,
'password': 'test'
}
# call url again, this time with username and password
self.client.post(url, post_args)
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_openid_final_response(self):
user = UserFactory()
# login to the client so that we can persist session information
for name in ['Robot 33', '☃']:
resp = self._attempt_login_and_perform_final_response(user, name)
# all information is embedded in the redirect url
location = resp['Location']
# parse the url
parsed_url = urlparse(location)
parsed_qs = parse_qs(parsed_url.query)
self.assertEquals(parsed_qs['openid.ax.type.ext1'][0], 'http://axschema.org/contact/email')
self.assertEquals(parsed_qs['openid.ax.type.ext0'][0], 'http://axschema.org/namePerson')
self.assertEquals(parsed_qs['openid.ax.value.ext1.1'][0], user.email)
self.assertEquals(parsed_qs['openid.ax.value.ext0.1'][0], user.profile.name)
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_openid_invalid_password(self):
url = reverse('openid-provider-login')
user = UserFactory()
# login to the client so that we can persist session information
for method in ['POST', 'GET']:
self.client.login(username=user.username, password='test')
self.attempt_login(200, method)
openid_setup = self.client.session['openid_setup']
self.assertIn('post_params', openid_setup)
post_args = {
'email': user.email,
'password': 'bad_password',
}
# call url again, this time with username and password
resp = self.client.post(url, post_args)
self.assertEquals(resp.status_code, 302)
redirect_url = resp['Location']
parsed_url = urlparse(redirect_url)
query_params = parse_qs(parsed_url[4])
self.assertIn('openid.return_to', query_params)
self.assertTrue(
query_params['openid.return_to'][0].startswith('http://testserver/openid/complete/')
)
class OpenIdProviderLiveServerTest(LiveServerTestCase):
"""
In order for this absolute URL to work (i.e. to get xrds, then authentication)
in the test environment, we either need a live server that works with the default
fetcher (i.e. urlopen2), or a test server that is reached through a custom fetcher.
Here we do the former.
"""
@skipUnless(settings.FEATURES.get('AUTH_USE_OPENID') and
settings.FEATURES.get('AUTH_USE_OPENID_PROVIDER'),
'OpenID not enabled')
def test_begin_login(self):
# the provider URL must be converted to an absolute URL in order to be
# used as an openid provider.
provider_url = reverse('openid-provider-xrds')
factory = RequestFactory()
request = factory.request()
abs_provider_url = request.build_absolute_uri(location=provider_url)
# In order for this absolute URL to work (i.e. to get xrds, then authentication)
# in the test environment, we either need a live server that works with the default
# fetcher (i.e. urlopen2), or a test server that is reached through a custom fetcher.
# Here we do the latter:
fetcher = MyFetcher(self.client)
openid.fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)
# now we can begin the login process by invoking a local openid client,
# with a pointer to the (also-local) openid provider:
with self.settings(OPENID_SSO_SERVER_URL=abs_provider_url):
url = reverse('openid-login')
resp = self.client.post(url)
code = 200
self.assertEqual(resp.status_code, code,
"got code {0} for url '{1}'. Expected code {2}"
.format(resp.status_code, url, code))
@classmethod
def tearDownClass(cls):
"""
Workaround for a runtime error that occurs
intermittently when the server thread doesn't shut down
within 2 seconds.
Since the server is running in a Django thread and will
be terminated when the test suite terminates,
this shouldn't cause a resource allocation issue.
"""
try:
super(OpenIdProviderLiveServerTest, cls).tearDownClass()
except RuntimeError:
print "Warning: Could not shut down test server."<|fim▁end|> | # now we can begin the login process by invoking a local openid client,
# with a pointer to the (also-local) openid provider:
with self.settings(OPENID_SSO_SERVER_URL=abs_provider_url): |
<|file_name|>basis.js<|end_file_name|><|fim▁begin|>import BasisDrawerCloseZone from './_drawer-close-zone.js';
import BasisDrawer from './_drawer.js';
import BasisHamburgerBtn from './_hamburger-btn.js';
import BasisNavbar from './_navbar.js';
import BasisPageEffect from './_page-effect.js';
document.addEventListener(
'DOMContentLoaded',
() => {
new BasisDrawerCloseZone();
new BasisDrawer({drawer: '.c-drawer'});
new BasisDrawer({drawer: '.c-dropdown'});
new BasisHamburgerBtn();
new BasisNavbar();
new BasisPageEffect();
},
false<|fim▁hole|><|fim▁end|> | ); |
<|file_name|>test_hyperbolic.py<|end_file_name|><|fim▁begin|>from sympy import symbols, Symbol, sinh, nan, oo, zoo, pi, asinh, acosh, log, sqrt, \
coth, I, cot, E, tanh, tan, cosh, cos, S, sin, Rational, atanh, acoth, \
Integer, O, exp, sech, sec, csch
from sympy.utilities.pytest import raises
def test_sinh():
x, y = symbols('x,y')
k = Symbol('k', integer=True)
assert sinh(nan) == nan
assert sinh(zoo) == nan
assert sinh(oo) == oo
assert sinh(-oo) == -oo
assert sinh(0) == 0
assert sinh(1) == sinh(1)
assert sinh(-1) == -sinh(1)
assert sinh(x) == sinh(x)
assert sinh(-x) == -sinh(x)
assert sinh(pi) == sinh(pi)
assert sinh(-pi) == -sinh(pi)
assert sinh(2**1024 * E) == sinh(2**1024 * E)
assert sinh(-2**1024 * E) == -sinh(2**1024 * E)
assert sinh(pi*I) == 0
assert sinh(-pi*I) == 0
assert sinh(2*pi*I) == 0
assert sinh(-2*pi*I) == 0
assert sinh(-3*10**73*pi*I) == 0
assert sinh(7*10**103*pi*I) == 0
assert sinh(pi*I/2) == I
assert sinh(-pi*I/2) == -I
assert sinh(5*pi*I/2) == I
assert sinh(7*pi*I/2) == -I
assert sinh(pi*I/3) == S.Half*sqrt(3)*I
assert sinh(-2*pi*I/3) == -S.Half*sqrt(3)*I
assert sinh(pi*I/4) == S.Half*sqrt(2)*I
assert sinh(-pi*I/4) == -S.Half*sqrt(2)*I
assert sinh(17*pi*I/4) == S.Half*sqrt(2)*I
assert sinh(-3*pi*I/4) == -S.Half*sqrt(2)*I
assert sinh(pi*I/6) == S.Half*I
assert sinh(-pi*I/6) == -S.Half*I
assert sinh(7*pi*I/6) == -S.Half*I
assert sinh(-5*pi*I/6) == -S.Half*I
assert sinh(pi*I/105) == sin(pi/105)*I
assert sinh(-pi*I/105) == -sin(pi/105)*I
assert sinh(2 + 3*I) == sinh(2 + 3*I)
assert sinh(x*I) == sin(x)*I
assert sinh(k*pi*I) == 0
assert sinh(17*k*pi*I) == 0
assert sinh(k*pi*I/2) == sin(k*pi/2)*I
def test_sinh_series():
x = Symbol('x')
assert sinh(x).series(x, 0, 10) == \
x + x**3/6 + x**5/120 + x**7/5040 + x**9/362880 + O(x**10)
def test_cosh():
x, y = symbols('x,y')
k = Symbol('k', integer=True)
assert cosh(nan) == nan
assert cosh(zoo) == nan
assert cosh(oo) == oo
assert cosh(-oo) == oo
assert cosh(0) == 1
assert cosh(1) == cosh(1)
assert cosh(-1) == cosh(1)
assert cosh(x) == cosh(x)
assert cosh(-x) == cosh(x)
assert cosh(pi*I) == cos(pi)
assert cosh(-pi*I) == cos(pi)
assert cosh(2**1024 * E) == cosh(2**1024 * E)
assert cosh(-2**1024 * E) == cosh(2**1024 * E)
assert cosh(pi*I/2) == 0
assert cosh(-pi*I/2) == 0
assert cosh((-3*10**73 + 1)*pi*I/2) == 0
assert cosh((7*10**103 + 1)*pi*I/2) == 0
assert cosh(pi*I) == -1
assert cosh(-pi*I) == -1
assert cosh(5*pi*I) == -1
assert cosh(8*pi*I) == 1
assert cosh(pi*I/3) == S.Half
assert cosh(-2*pi*I/3) == -S.Half
assert cosh(pi*I/4) == S.Half*sqrt(2)
assert cosh(-pi*I/4) == S.Half*sqrt(2)
assert cosh(11*pi*I/4) == -S.Half*sqrt(2)
assert cosh(-3*pi*I/4) == -S.Half*sqrt(2)
assert cosh(pi*I/6) == S.Half*sqrt(3)
assert cosh(-pi*I/6) == S.Half*sqrt(3)
assert cosh(7*pi*I/6) == -S.Half*sqrt(3)
assert cosh(-5*pi*I/6) == -S.Half*sqrt(3)
assert cosh(pi*I/105) == cos(pi/105)
assert cosh(-pi*I/105) == cos(pi/105)
assert cosh(2 + 3*I) == cosh(2 + 3*I)
assert cosh(x*I) == cos(x)
assert cosh(k*pi*I) == cos(k*pi)
assert cosh(17*k*pi*I) == cos(17*k*pi)
assert cosh(k*pi) == cosh(k*pi)
def test_cosh_series():
x = Symbol('x')
assert cosh(x).series(x, 0, 10) == \
1 + x**2/2 + x**4/24 + x**6/720 + x**8/40320 + O(x**10)
def test_tanh():
x, y = symbols('x,y')
k = Symbol('k', integer=True)
assert tanh(nan) == nan
assert tanh(zoo) == nan
assert tanh(oo) == 1
assert tanh(-oo) == -1
assert tanh(0) == 0
assert tanh(1) == tanh(1)
assert tanh(-1) == -tanh(1)
assert tanh(x) == tanh(x)
assert tanh(-x) == -tanh(x)
assert tanh(pi) == tanh(pi)
assert tanh(-pi) == -tanh(pi)
assert tanh(2**1024 * E) == tanh(2**1024 * E)
assert tanh(-2**1024 * E) == -tanh(2**1024 * E)
assert tanh(pi*I) == 0
assert tanh(-pi*I) == 0
assert tanh(2*pi*I) == 0
assert tanh(-2*pi*I) == 0
assert tanh(-3*10**73*pi*I) == 0
assert tanh(7*10**103*pi*I) == 0
assert tanh(pi*I/2) == tanh(pi*I/2)
assert tanh(-pi*I/2) == -tanh(pi*I/2)
assert tanh(5*pi*I/2) == tanh(5*pi*I/2)
assert tanh(7*pi*I/2) == tanh(7*pi*I/2)
assert tanh(pi*I/3) == sqrt(3)*I
assert tanh(-2*pi*I/3) == sqrt(3)*I
assert tanh(pi*I/4) == I
assert tanh(-pi*I/4) == -I
assert tanh(17*pi*I/4) == I
assert tanh(-3*pi*I/4) == I
assert tanh(pi*I/6) == I/sqrt(3)
assert tanh(-pi*I/6) == -I/sqrt(3)
assert tanh(7*pi*I/6) == I/sqrt(3)
assert tanh(-5*pi*I/6) == I/sqrt(3)
assert tanh(pi*I/105) == tan(pi/105)*I
assert tanh(-pi*I/105) == -tan(pi/105)*I
assert tanh(2 + 3*I) == tanh(2 + 3*I)
assert tanh(x*I) == tan(x)*I
assert tanh(k*pi*I) == 0
assert tanh(17*k*pi*I) == 0
assert tanh(k*pi*I/2) == tan(k*pi/2)*I
def test_tanh_series():
x = Symbol('x')
assert tanh(x).series(x, 0, 10) == \
x - x**3/3 + 2*x**5/15 - 17*x**7/315 + 62*x**9/2835 + O(x**10)
def test_coth():
x, y = symbols('x,y')
k = Symbol('k', integer=True)
assert coth(nan) == nan
assert coth(zoo) == nan
assert coth(oo) == 1
assert coth(-oo) == -1
assert coth(0) == coth(0)
assert coth(0) == zoo
assert coth(1) == coth(1)
assert coth(-1) == -coth(1)
assert coth(x) == coth(x)
assert coth(-x) == -coth(x)
assert coth(pi*I) == -I*cot(pi)
assert coth(-pi*I) == cot(pi)*I
assert coth(2**1024 * E) == coth(2**1024 * E)
assert coth(-2**1024 * E) == -coth(2**1024 * E)
assert coth(pi*I) == -I*cot(pi)
assert coth(-pi*I) == I*cot(pi)
assert coth(2*pi*I) == -I*cot(2*pi)
assert coth(-2*pi*I) == I*cot(2*pi)
assert coth(-3*10**73*pi*I) == I*cot(3*10**73*pi)
assert coth(7*10**103*pi*I) == -I*cot(7*10**103*pi)
assert coth(pi*I/2) == 0
assert coth(-pi*I/2) == 0
assert coth(5*pi*I/2) == 0
assert coth(7*pi*I/2) == 0
assert coth(pi*I/3) == -I/sqrt(3)
assert coth(-2*pi*I/3) == -I/sqrt(3)
assert coth(pi*I/4) == -I
assert coth(-pi*I/4) == I
assert coth(17*pi*I/4) == -I
assert coth(-3*pi*I/4) == -I
assert coth(pi*I/6) == -sqrt(3)*I
assert coth(-pi*I/6) == sqrt(3)*I
assert coth(7*pi*I/6) == -sqrt(3)*I
assert coth(-5*pi*I/6) == -sqrt(3)*I
assert coth(pi*I/105) == -cot(pi/105)*I
assert coth(-pi*I/105) == cot(pi/105)*I
assert coth(2 + 3*I) == coth(2 + 3*I)
assert coth(x*I) == -cot(x)*I
assert coth(k*pi*I) == -cot(k*pi)*I
assert coth(17*k*pi*I) == -cot(17*k*pi)*I
assert coth(k*pi*I) == -cot(k*pi)*I
def test_coth_series():
x = Symbol('x')
assert coth(x).series(x, 0, 8) == \
1/x + x/3 - x**3/45 + 2*x**5/945 - x**7/4725 + O(x**8)
def test_csch():
x, y = symbols('x,y')
k = Symbol('k', integer=True)
assert csch(nan) == nan
assert csch(zoo) == nan
assert csch(oo) == 0
assert csch(-oo) == 0
assert csch(0) == zoo
assert csch(-1) == -csch(1)
assert csch(-x) == -csch(x)
assert csch(-pi) == -csch(pi)
assert csch(-2**1024 * E) == -csch(2**1024 * E)
assert csch(pi*I) == zoo
assert csch(-pi*I) == zoo
assert csch(2*pi*I) == zoo
assert csch(-2*pi*I) == zoo
assert csch(-3*10**73*pi*I) == zoo
assert csch(7*10**103*pi*I) == zoo
assert csch(pi*I/2) == -I
assert csch(-pi*I/2) == I
assert csch(5*pi*I/2) == -I
assert csch(7*pi*I/2) == I
assert csch(pi*I/3) == -2/sqrt(3)*I
assert csch(-2*pi*I/3) == 2/sqrt(3)*I
assert csch(pi*I/4) == -sqrt(2)*I
assert csch(-pi*I/4) == sqrt(2)*I
assert csch(7*pi*I/4) == sqrt(2)*I
assert csch(-3*pi*I/4) == sqrt(2)*I
assert csch(pi*I/6) == -2*I
assert csch(-pi*I/6) == 2*I
assert csch(7*pi*I/6) == 2*I
assert csch(-7*pi*I/6) == -2*I
assert csch(-5*pi*I/6) == 2*I
assert csch(pi*I/105) == -1/sin(pi/105)*I
assert csch(-pi*I/105) == 1/sin(pi/105)*I
assert csch(x*I) == -1/sin(x)*I
assert csch(k*pi*I) == zoo
assert csch(17*k*pi*I) == zoo<|fim▁hole|> assert csch(k*pi*I/2) == -1/sin(k*pi/2)*I
def test_csch_series():
x = Symbol('x')
assert csch(x).series(x, 0, 10) == \
        1/x - x/6 + 7*x**3/360 - 31*x**5/15120 + 127*x**7/604800 \
- 73*x**9/3421440 + O(x**10)
def test_sech():
x, y = symbols('x, y')
k = Symbol('k', integer=True)
assert sech(nan) == nan
assert sech(zoo) == nan
assert sech(oo) == 0
assert sech(-oo) == 0
assert sech(0) == 1
assert sech(-1) == sech(1)
assert sech(-x) == sech(x)
assert sech(pi*I) == sec(pi)
assert sech(-pi*I) == sec(pi)
assert sech(-2**1024 * E) == sech(2**1024 * E)
assert sech(pi*I/2) == zoo
assert sech(-pi*I/2) == zoo
assert sech((-3*10**73 + 1)*pi*I/2) == zoo
assert sech((7*10**103 + 1)*pi*I/2) == zoo
assert sech(pi*I) == -1
assert sech(-pi*I) == -1
assert sech(5*pi*I) == -1
assert sech(8*pi*I) == 1
assert sech(pi*I/3) == 2
assert sech(-2*pi*I/3) == -2
assert sech(pi*I/4) == sqrt(2)
assert sech(-pi*I/4) == sqrt(2)
assert sech(5*pi*I/4) == -sqrt(2)
assert sech(-5*pi*I/4) == -sqrt(2)
assert sech(pi*I/6) == 2/sqrt(3)
assert sech(-pi*I/6) == 2/sqrt(3)
assert sech(7*pi*I/6) == -2/sqrt(3)
assert sech(-5*pi*I/6) == -2/sqrt(3)
assert sech(pi*I/105) == 1/cos(pi/105)
assert sech(-pi*I/105) == 1/cos(pi/105)
assert sech(x*I) == 1/cos(x)
assert sech(k*pi*I) == 1/cos(k*pi)
assert sech(17*k*pi*I) == 1/cos(17*k*pi)
def test_sech_series():
x = Symbol('x')
assert sech(x).series(x, 0, 10) == \
1 - x**2/2 + 5*x**4/24 - 61*x**6/720 + 277*x**8/8064 + O(x**10)
def test_asinh():
x, y = symbols('x,y')
assert asinh(x) == asinh(x)
assert asinh(-x) == -asinh(x)
assert asinh(nan) == nan
assert asinh( 0) == 0
assert asinh(+1) == log(sqrt(2) + 1)
assert asinh(-1) == log(sqrt(2) - 1)
assert asinh(I) == pi*I/2
assert asinh(-I) == -pi*I/2
assert asinh(I/2) == pi*I/6
assert asinh(-I/2) == -pi*I/6
assert asinh(oo) == oo
assert asinh(-oo) == -oo
assert asinh(I*oo) == oo
    assert asinh(-I*oo) == -oo
assert asinh(zoo) == zoo
    assert asinh(I*(sqrt(3) - 1)/(2**(S(3)/2))) == pi*I/12
    assert asinh(-I*(sqrt(3) - 1)/(2**(S(3)/2))) == -pi*I/12
assert asinh(I*(sqrt(5) - 1)/4) == pi*I/10
assert asinh(-I*(sqrt(5) - 1)/4) == -pi*I/10
assert asinh(I*(sqrt(5) + 1)/4) == 3*pi*I/10
assert asinh(-I*(sqrt(5) + 1)/4) == -3*pi*I/10
def test_asinh_series():
x = Symbol('x')
assert asinh(x).series(x, 0, 8) == \
x - x**3/6 + 3*x**5/40 - 5*x**7/112 + O(x**8)
t5 = asinh(x).taylor_term(5, x)
assert t5 == 3*x**5/40
assert asinh(x).taylor_term(7, x, t5, 0) == -5*x**7/112
def test_acosh():
# TODO please write more tests -- see issue 3751
# From http://functions.wolfram.com/ElementaryFunctions/ArcCosh/03/01/
# at specific points
x = Symbol('x')
assert acosh(-x) == acosh(-x)
assert acosh(1) == 0
assert acosh(-1) == pi*I
assert acosh(0) == I*pi/2
assert acosh(Rational(1, 2)) == I*pi/3
assert acosh(Rational(-1, 2)) == 2*pi*I/3
assert acosh(zoo) == oo
assert acosh(I) == log(I*(1 + sqrt(2)))
assert acosh(-I) == log(-I*(1 + sqrt(2)))
assert acosh((sqrt(3) - 1)/(2*sqrt(2))) == 5*pi*I/12
assert acosh(-(sqrt(3) - 1)/(2*sqrt(2))) == 7*pi*I/12
assert acosh(sqrt(2)/2) == I*pi/4
assert acosh(-sqrt(2)/2) == 3*I*pi/4
assert acosh(sqrt(3)/2) == I*pi/6
assert acosh(-sqrt(3)/2) == 5*I*pi/6
assert acosh(sqrt(2 + sqrt(2))/2) == I*pi/8
assert acosh(-sqrt(2 + sqrt(2))/2) == 7*I*pi/8
assert acosh(sqrt(2 - sqrt(2))/2) == 3*I*pi/8
assert acosh(-sqrt(2 - sqrt(2))/2) == 5*I*pi/8
assert acosh((1 + sqrt(3))/(2*sqrt(2))) == I*pi/12
assert acosh(-(1 + sqrt(3))/(2*sqrt(2))) == 11*I*pi/12
assert acosh((sqrt(5) + 1)/4) == I*pi/5
assert acosh(-(sqrt(5) + 1)/4) == 4*I*pi/5
def test_acosh_infinities():
assert acosh(oo) == oo
assert acosh(-oo) == oo
assert acosh(I*oo) == oo
assert acosh(-I*oo) == oo
def test_acosh_series():
x = Symbol('x')
assert acosh(x).series(x, 0, 8) == \
-I*x + pi*I/2 - I*x**3/6 - 3*I*x**5/40 - 5*I*x**7/112 + O(x**8)
t5 = acosh(x).taylor_term(5, x)
assert t5 == - 3*I*x**5/40
assert acosh(x).taylor_term(7, x, t5, 0) == - 5*I*x**7/112
# TODO please write more tests -- see issue 3751
def test_atanh():
# TODO please write more tests -- see issue 3751
# From http://functions.wolfram.com/ElementaryFunctions/ArcTanh/03/01/
# at specific points
x = Symbol('x')
#at specific points
assert atanh(0) == 0
assert atanh(I) == I*pi/4
assert atanh(-I) == -I*pi/4
assert atanh(1) == oo
assert atanh(-1) == -oo
# at infinites
assert atanh(I*oo) == I*pi/2
assert atanh(-I*oo) == -I*pi/2
assert atanh(zoo) == nan
#properties
assert atanh(-x) == -atanh(x)
assert atanh(I/sqrt(3)) == I*pi/6
assert atanh(-I/sqrt(3)) == -I*pi/6
assert atanh(I*sqrt(3)) == I*pi/3
assert atanh(-I*sqrt(3)) == -I*pi/3
assert atanh(I*(1 + sqrt(2))) == 3*pi*I/8
assert atanh(I*(sqrt(2) - 1)) == pi*I/8
assert atanh(I*(1 - sqrt(2))) == -pi*I/8
assert atanh(-I*(1 + sqrt(2))) == -3*pi*I/8
assert atanh(I*sqrt(5 + 2*sqrt(5))) == 2*I*pi/5
assert atanh(-I*sqrt(5 + 2*sqrt(5))) == -2*I*pi/5
assert atanh(I*(2 - sqrt(3))) == pi*I/12
assert atanh(I*(sqrt(3) - 2)) == -pi*I/12
assert atanh(oo) == -I*pi/2
def test_atanh_series():
x = Symbol('x')
assert atanh(x).series(x, 0, 10) == \
x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
def test_atanh_infinities():
assert atanh(oo) == -I*pi/2
assert atanh(-oo) == I*pi/2
# TODO please write more tests -- see issue 3751
def test_acoth():
# TODO please write more tests -- see issue 3751
# From http://functions.wolfram.com/ElementaryFunctions/ArcCoth/03/01/
# at specific points
x = Symbol('x')
#at specific points
assert acoth(0) == I*pi/2
assert acoth(I) == -I*pi/4
assert acoth(-I) == I*pi/4
assert acoth(1) == oo
assert acoth(-1) == -oo
# at infinites
assert acoth(oo) == 0
assert acoth(-oo) == 0
assert acoth(I*oo) == 0
assert acoth(-I*oo) == 0
assert acoth(zoo) == 0
#properties
assert acoth(-x) == -acoth(x)
assert acoth(I/sqrt(3)) == -I*pi/3
assert acoth(-I/sqrt(3)) == I*pi/3
assert acoth(I*sqrt(3)) == -I*pi/6
assert acoth(-I*sqrt(3)) == I*pi/6
assert acoth(I*(1 + sqrt(2))) == -pi*I/8
assert acoth(-I*(sqrt(2) + 1)) == pi*I/8
assert acoth(I*(1 - sqrt(2))) == 3*pi*I/8
assert acoth(I*(sqrt(2) - 1)) == -3*pi*I/8
assert acoth(I*sqrt(5 + 2*sqrt(5))) == -I*pi/10
assert acoth(-I*sqrt(5 + 2*sqrt(5))) == I*pi/10
assert acoth(I*(2 + sqrt(3))) == -pi*I/12
assert acoth(-I*(2 + sqrt(3))) == pi*I/12
assert acoth(I*(2 - sqrt(3))) == -5*pi*I/12
assert acoth(I*(sqrt(3) - 2)) == 5*pi*I/12
def test_acoth_series():
x = Symbol('x')
assert acoth(x).series(x, 0, 10) == \
I*pi/2 + x + x**3/3 + x**5/5 + x**7/7 + x**9/9 + O(x**10)
def test_inverses():
x = Symbol('x')
assert sinh(x).inverse() == asinh
raises(AttributeError, lambda: cosh(x).inverse())
assert tanh(x).inverse() == atanh
assert coth(x).inverse() == acoth
assert asinh(x).inverse() == sinh
assert acosh(x).inverse() == cosh
assert atanh(x).inverse() == tanh
assert acoth(x).inverse() == coth
def test_leading_term():
x = Symbol('x')
assert cosh(x).as_leading_term(x) == 1
assert coth(x).as_leading_term(x) == 1/x
assert acosh(x).as_leading_term(x) == I*pi/2
assert acoth(x).as_leading_term(x) == I*pi/2
for func in [sinh, tanh, asinh, atanh]:
assert func(x).as_leading_term(x) == x
for func in [sinh, cosh, tanh, coth, asinh, acosh, atanh, acoth]:
for arg in (1/x, S.Half):
eq = func(arg)
assert eq.as_leading_term(x) == eq
for func in [csch, sech]:
eq = func(S.Half)
assert eq.as_leading_term(x) == eq
def test_complex():
a, b = symbols('a,b', real=True)
z = a + b*I
for func in [sinh, cosh, tanh, coth, sech, csch]:
assert func(z).conjugate() == func(a - b*I)
for deep in [True, False]:
assert sinh(z).expand(
complex=True, deep=deep) == sinh(a)*cos(b) + I*cosh(a)*sin(b)
assert cosh(z).expand(
complex=True, deep=deep) == cosh(a)*cos(b) + I*sinh(a)*sin(b)
assert tanh(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
a)/(cos(b)**2 + sinh(a)**2) + I*sin(b)*cos(b)/(cos(b)**2 + sinh(a)**2)
assert coth(z).expand(complex=True, deep=deep) == sinh(a)*cosh(
a)/(sin(b)**2 + sinh(a)**2) - I*sin(b)*cos(b)/(sin(b)**2 + sinh(a)**2)
assert csch(z).expand(complex=True, deep=deep) == cos(b) * sinh(a) / (sin(b)**2\
*cosh(a)**2 + cos(b)**2 * sinh(a)**2) - I*sin(b) * cosh(a) / (sin(b)**2\
*cosh(a)**2 + cos(b)**2 * sinh(a)**2)
assert sech(z).expand(complex=True, deep=deep) == cos(b) * cosh(a) / (sin(b)**2\
*sinh(a)**2 + cos(b)**2 * cosh(a)**2) - I*sin(b) * sinh(a) / (sin(b)**2\
*sinh(a)**2 + cos(b)**2 * cosh(a)**2)
def test_complex_2899():
a, b = symbols('a,b', real=True)
for deep in [True, False]:
for func in [sinh, cosh, tanh, coth]:
assert func(a).expand(complex=True, deep=deep) == func(a)
def test_simplifications():
x = Symbol('x')
assert sinh(asinh(x)) == x
assert sinh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1)
assert sinh(atanh(x)) == x/sqrt(1 - x**2)
assert sinh(acoth(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
assert cosh(asinh(x)) == sqrt(1 + x**2)
assert cosh(acosh(x)) == x
assert cosh(atanh(x)) == 1/sqrt(1 - x**2)
assert cosh(acoth(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
assert tanh(asinh(x)) == x/sqrt(1 + x**2)
assert tanh(acosh(x)) == sqrt(x - 1) * sqrt(x + 1) / x
assert tanh(atanh(x)) == x
assert tanh(acoth(x)) == 1/x
assert coth(asinh(x)) == sqrt(1 + x**2)/x
assert coth(acosh(x)) == x/(sqrt(x - 1) * sqrt(x + 1))
assert coth(atanh(x)) == 1/x
assert coth(acoth(x)) == x
assert csch(asinh(x)) == 1/x
assert csch(acosh(x)) == 1/(sqrt(x - 1) * sqrt(x + 1))
assert csch(atanh(x)) == sqrt(1 - x**2)/x
assert csch(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)
assert sech(asinh(x)) == 1/sqrt(1 + x**2)
assert sech(acosh(x)) == 1/x
assert sech(atanh(x)) == sqrt(1 - x**2)
assert sech(acoth(x)) == sqrt(x - 1) * sqrt(x + 1)/x
def test_issue_4136():
assert cosh(asinh(Integer(3)/2)) == sqrt(Integer(13)/4)
def test_sinh_rewrite():
x = Symbol('x')
assert sinh(x).rewrite(exp) == (exp(x) - exp(-x))/2 \
== sinh(x).rewrite('tractable')
assert sinh(x).rewrite(cosh) == -I*cosh(x + I*pi/2)
tanh_half = tanh(S.Half*x)
assert sinh(x).rewrite(tanh) == 2*tanh_half/(1 - tanh_half**2)
coth_half = coth(S.Half*x)
assert sinh(x).rewrite(coth) == 2*coth_half/(coth_half**2 - 1)
def test_cosh_rewrite():
x = Symbol('x')
assert cosh(x).rewrite(exp) == (exp(x) + exp(-x))/2 \
== cosh(x).rewrite('tractable')
assert cosh(x).rewrite(sinh) == -I*sinh(x + I*pi/2)
tanh_half = tanh(S.Half*x)**2
assert cosh(x).rewrite(tanh) == (1 + tanh_half)/(1 - tanh_half)
coth_half = coth(S.Half*x)**2
assert cosh(x).rewrite(coth) == (coth_half + 1)/(coth_half - 1)
def test_tanh_rewrite():
x = Symbol('x')
assert tanh(x).rewrite(exp) == (exp(x) - exp(-x))/(exp(x) + exp(-x)) \
== tanh(x).rewrite('tractable')
assert tanh(x).rewrite(sinh) == I*sinh(x)/sinh(I*pi/2 - x)
assert tanh(x).rewrite(cosh) == I*cosh(I*pi/2 - x)/cosh(x)
assert tanh(x).rewrite(coth) == 1/coth(x)
def test_coth_rewrite():
x = Symbol('x')
assert coth(x).rewrite(exp) == (exp(x) + exp(-x))/(exp(x) - exp(-x)) \
== coth(x).rewrite('tractable')
assert coth(x).rewrite(sinh) == -I*sinh(I*pi/2 - x)/sinh(x)
assert coth(x).rewrite(cosh) == -I*cosh(x)/cosh(I*pi/2 - x)
assert coth(x).rewrite(tanh) == 1/tanh(x)
def test_csch_rewrite():
x = Symbol('x')
assert csch(x).rewrite(exp) == 1 / (exp(x)/2 - exp(-x)/2) \
== csch(x).rewrite('tractable')
assert csch(x).rewrite(cosh) == I/cosh(x + I*pi/2)
tanh_half = tanh(S.Half*x)
assert csch(x).rewrite(tanh) == (1 - tanh_half**2)/(2*tanh_half)
coth_half = coth(S.Half*x)
assert csch(x).rewrite(coth) == (coth_half**2 - 1)/(2*coth_half)
def test_sech_rewrite():
x = Symbol('x')
assert sech(x).rewrite(exp) == 1 / (exp(x)/2 + exp(-x)/2) \
== sech(x).rewrite('tractable')
assert sech(x).rewrite(sinh) == I/sinh(x + I*pi/2)
tanh_half = tanh(S.Half*x)**2
assert sech(x).rewrite(tanh) == (1 - tanh_half)/(1 + tanh_half)
coth_half = coth(S.Half*x)**2
assert sech(x).rewrite(coth) == (coth_half - 1)/(coth_half + 1)
def test_derivs():
x = Symbol('x')
assert coth(x).diff(x) == -sinh(x)**(-2)
assert sinh(x).diff(x) == cosh(x)
assert cosh(x).diff(x) == sinh(x)
assert tanh(x).diff(x) == -tanh(x)**2 + 1
assert csch(x).diff(x) == -coth(x)*csch(x)
assert sech(x).diff(x) == -tanh(x)*sech(x)
assert acoth(x).diff(x) == 1/(-x**2 + 1)
assert asinh(x).diff(x) == 1/sqrt(x**2 + 1)
assert acosh(x).diff(x) == 1/sqrt(x**2 - 1)
assert atanh(x).diff(x) == 1/(-x**2 + 1)
def test_sinh_expansion():
x,y = symbols('x,y')
assert sinh(x+y).expand(trig=True) == sinh(x)*cosh(y) + cosh(x)*sinh(y)
assert sinh(2*x).expand(trig=True) == 2*sinh(x)*cosh(x)
assert sinh(3*x).expand(trig=True).expand() == \
sinh(x)**3 + 3*sinh(x)*cosh(x)**2
def test_cosh_expansion():
x,y = symbols('x,y')
assert cosh(x+y).expand(trig=True) == cosh(x)*cosh(y) + sinh(x)*sinh(y)
assert cosh(2*x).expand(trig=True) == cosh(x)**2 + sinh(x)**2
assert cosh(3*x).expand(trig=True).expand() == \
3*sinh(x)**2*cosh(x) + cosh(x)**3<|fim▁end|> | |
<|file_name|>video-controls-ws.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';<|fim▁hole|> selector: 'fo-video-controls-ws',
templateUrl: 'video-controls-ws.component.html',
styleUrls: ['video-controls-ws.component.css']
})
export class VideoControlsWSComponent {
constructor (private vpService: VideoPlayerService) {
}
emitPlay() {
this.vpService.controlsSignals.emit(VideoPlayerSignals.play);
}
emitPause() {
this.vpService.controlsSignals.emit(VideoPlayerSignals.pause);
}
emitStop() {
this.vpService.controlsSignals.emit(VideoPlayerSignals.stop);
}
}<|fim▁end|> | import { VideoPlayerService, VideoPlayerSignals } from '../video-player.service';
@Component({
moduleId: module.id, |
<|file_name|>test_values.py<|end_file_name|><|fim▁begin|>import decimal
import os
from contextlib import contextmanager
from django.test import TestCase
from django.core.exceptions import ImproperlyConfigured
from mock import patch
from configurations.values import (Value, BooleanValue, IntegerValue,
FloatValue, DecimalValue, ListValue,
TupleValue, SetValue, DictValue,
URLValue, EmailValue, IPValue,
RegexValue, PathValue, SecretValue,
DatabaseURLValue, EmailURLValue,
CacheURLValue, BackendsValue,
CastingMixin, SearchURLValue)
@contextmanager
def env(**kwargs):
with patch.dict(os.environ, clear=True, **kwargs):
yield
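# Usage sketch (illustrative, not part of the original tests): inside
# ``with env(DJANGO_TEST='x'):`` os.environ contains *only* DJANGO_TEST,
# because patch.dict is called with clear=True; the previous environment
# is restored automatically when the block exits.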
class FailingCasterValue(CastingMixin, Value):
caster = 'non.existing.caster'
class ValueTests(TestCase):
def test_value(self):
value = Value('default', environ=False)
self.assertEqual(value.setup('TEST'), 'default')
with env(DJANGO_TEST='override'):
self.assertEqual(value.setup('TEST'), 'default')
@patch.dict(os.environ, clear=True, DJANGO_TEST='override')
def test_env_var(self):
value = Value('default')
self.assertEqual(value.setup('TEST'), 'override')
self.assertNotEqual(value.setup('TEST'), value.default)
self.assertEqual(value.to_python(os.environ['DJANGO_TEST']),
value.setup('TEST'))
def test_value_reuse(self):
value1 = Value('default')
value2 = Value(value1)
self.assertEqual(value1.setup('TEST1'), 'default')
self.assertEqual(value2.setup('TEST2'), 'default')
with env(DJANGO_TEST1='override1', DJANGO_TEST2='override2'):
self.assertEqual(value1.setup('TEST1'), 'override1')
self.assertEqual(value2.setup('TEST2'), 'override2')
def test_env_var_prefix(self):
with patch.dict(os.environ, clear=True, ACME_TEST='override'):
value = Value('default', environ_prefix='ACME')
self.assertEqual(value.setup('TEST'), 'override')
with patch.dict(os.environ, clear=True, TEST='override'):
value = Value('default', environ_prefix='')
self.assertEqual(value.setup('TEST'), 'override')
def test_boolean_values_true(self):
value = BooleanValue(False)
for truthy in value.true_values:
with env(DJANGO_TEST=truthy):
self.assertTrue(value.setup('TEST'))
def test_boolean_values_faulty(self):
self.assertRaises(ValueError, BooleanValue, 'false')
def test_boolean_values_false(self):
value = BooleanValue(True)
for falsy in value.false_values:
with env(DJANGO_TEST=falsy):
self.assertFalse(value.setup('TEST'))
def test_boolean_values_nonboolean(self):
value = BooleanValue(True)
with env(DJANGO_TEST='nonboolean'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_integer_values(self):
value = IntegerValue(1)<|fim▁hole|> self.assertEqual(value.setup('TEST'), 2)
with env(DJANGO_TEST='noninteger'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_float_values(self):
value = FloatValue(1.0)
with env(DJANGO_TEST='2.0'):
self.assertEqual(value.setup('TEST'), 2.0)
with env(DJANGO_TEST='noninteger'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_decimal_values(self):
value = DecimalValue(decimal.Decimal(1))
with env(DJANGO_TEST='2'):
self.assertEqual(value.setup('TEST'), decimal.Decimal(2))
with env(DJANGO_TEST='nondecimal'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_failing_caster(self):
self.assertRaises(ImproperlyConfigured, FailingCasterValue)
def test_list_values_default(self):
value = ListValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ['2', '2'])
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), ['2', '2'])
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), [])
def test_list_values_separator(self):
value = ListValue(separator=':')
with env(DJANGO_TEST='/usr/bin:/usr/sbin:/usr/local/bin'):
self.assertEqual(value.setup('TEST'),
['/usr/bin', '/usr/sbin', '/usr/local/bin'])
def test_List_values_converter(self):
value = ListValue(converter=int)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), [2, 2])
value = ListValue(converter=float)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), [2.0, 2.0])
def test_list_values_custom_converter(self):
value = ListValue(converter=lambda x: x * 2)
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ['22', '22'])
def test_list_values_converter_exception(self):
value = ListValue(converter=int)
with env(DJANGO_TEST='2,b'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_tuple_values_default(self):
value = TupleValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), ('2', '2'))
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), ('2', '2'))
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), ())
def test_set_values_default(self):
value = SetValue()
with env(DJANGO_TEST='2,2'):
self.assertEqual(value.setup('TEST'), set(['2', '2']))
with env(DJANGO_TEST='2, 2 ,'):
self.assertEqual(value.setup('TEST'), set(['2', '2']))
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), set())
def test_dict_values_default(self):
value = DictValue()
with env(DJANGO_TEST='{2: 2}'):
self.assertEqual(value.setup('TEST'), {2: 2})
expected = {2: 2, '3': '3', '4': [1, 2, 3]}
with env(DJANGO_TEST="{2: 2, '3': '3', '4': [1, 2, 3]}"):
self.assertEqual(value.setup('TEST'), expected)
with env(DJANGO_TEST="""{
2: 2,
'3': '3',
'4': [1, 2, 3],
}"""):
self.assertEqual(value.setup('TEST'), expected)
with env(DJANGO_TEST=''):
self.assertEqual(value.setup('TEST'), {})
with env(DJANGO_TEST='spam'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_email_values(self):
value = EmailValue('[email protected]')
with env(DJANGO_TEST='[email protected]'):
self.assertEqual(value.setup('TEST'), '[email protected]')
with env(DJANGO_TEST='spam'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_url_values(self):
value = URLValue('http://eggs.spam')
with env(DJANGO_TEST='http://spam.eggs'):
self.assertEqual(value.setup('TEST'), 'http://spam.eggs')
with env(DJANGO_TEST='httb://spam.eggs'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_ip_values(self):
value = IPValue('0.0.0.0')
with env(DJANGO_TEST='127.0.0.1'):
self.assertEqual(value.setup('TEST'), '127.0.0.1')
with env(DJANGO_TEST='::1'):
self.assertEqual(value.setup('TEST'), '::1')
with env(DJANGO_TEST='spam.eggs'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_regex_values(self):
value = RegexValue('000--000', regex=r'\d+--\d+')
with env(DJANGO_TEST='123--456'):
self.assertEqual(value.setup('TEST'), '123--456')
with env(DJANGO_TEST='123456'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_path_values_with_check(self):
value = PathValue()
with env(DJANGO_TEST='/'):
self.assertEqual(value.setup('TEST'), '/')
with env(DJANGO_TEST='~/'):
self.assertEqual(value.setup('TEST'), os.path.expanduser('~'))
with env(DJANGO_TEST='/does/not/exist'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_path_values_no_check(self):
value = PathValue(check_exists=False)
with env(DJANGO_TEST='/'):
self.assertEqual(value.setup('TEST'), '/')
with env(DJANGO_TEST='~/spam/eggs'):
self.assertEqual(value.setup('TEST'),
os.path.join(os.path.expanduser('~'),
'spam', 'eggs'))
with env(DJANGO_TEST='/does/not/exist'):
self.assertEqual(value.setup('TEST'), '/does/not/exist')
def test_secret_value(self):
self.assertRaises(ValueError, SecretValue, 'default')
value = SecretValue()
self.assertRaises(ValueError, value.setup, 'TEST')
with env(DJANGO_SECRET_KEY='123'):
self.assertEqual(value.setup('SECRET_KEY'), '123')
value = SecretValue(environ_name='FACEBOOK_API_SECRET',
environ_prefix=None)
self.assertRaises(ValueError, value.setup, 'TEST')
with env(FACEBOOK_API_SECRET='123'):
self.assertEqual(value.setup('TEST'), '123')
def test_database_url_value(self):
value = DatabaseURLValue()
self.assertEqual(value.default, {})
with env(DATABASE_URL='sqlite://'):
self.assertEqual(value.setup('DATABASE_URL'), {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'HOST': None,
'NAME': ':memory:',
'PASSWORD': None,
'PORT': None,
'USER': None,
}})
def test_email_url_value(self):
value = EmailURLValue()
self.assertEqual(value.default, {})
with env(EMAIL_URL='smtps://[email protected]:[email protected]:587'):
self.assertEqual(value.setup('EMAIL_URL'), {
'EMAIL_BACKEND': 'django.core.mail.backends.smtp.EmailBackend',
'EMAIL_FILE_PATH': '',
'EMAIL_HOST': 'smtp.example.com',
'EMAIL_HOST_PASSWORD': 'password',
'EMAIL_HOST_USER': '[email protected]',
'EMAIL_PORT': 587,
'EMAIL_USE_TLS': True})
with env(EMAIL_URL='console://'):
self.assertEqual(value.setup('EMAIL_URL'), {
'EMAIL_BACKEND': 'django.core.mail.backends.console.EmailBackend',
'EMAIL_FILE_PATH': '',
'EMAIL_HOST': None,
'EMAIL_HOST_PASSWORD': None,
'EMAIL_HOST_USER': None,
'EMAIL_PORT': None,
'EMAIL_USE_TLS': False})
with env(EMAIL_URL='smtps://[email protected]:[email protected]:wrong'):
self.assertRaises(ValueError, value.setup, 'TEST')
def test_cache_url_value(self):
cache_setting = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'KEY_PREFIX': '',
'LOCATION': 'user@host:port:1'
}
}
cache_url = 'redis://user@host:port/1'
value = CacheURLValue(cache_url)
self.assertEqual(value.default, cache_setting)
value = CacheURLValue()
self.assertEqual(value.default, {})
with env(CACHE_URL='redis://user@host:port/1'):
self.assertEqual(value.setup('CACHE_URL'), cache_setting)
with env(CACHE_URL='wrong://user@host:port/1'):
self.assertRaises(KeyError, value.setup, 'TEST')
def test_search_url_value(self):
value = SearchURLValue()
self.assertEqual(value.default, {})
with env(SEARCH_URL='elasticsearch://127.0.0.1:9200/index'):
self.assertEqual(value.setup('SEARCH_URL'), {
'default': {
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
'URL': 'http://127.0.0.1:9200',
'INDEX_NAME': 'index',
}})
def test_backend_list_value(self):
backends = ['django.middleware.common.CommonMiddleware']
value = BackendsValue(backends)
self.assertEqual(value.setup('TEST'), backends)
backends = ['non.existing.Backend']
self.assertRaises(ValueError, BackendsValue, backends)<|fim▁end|> | with env(DJANGO_TEST='2'): |
<|file_name|>test_crop_op.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
def crop(data, offsets, crop_shape):
def indexOf(shape, index):
result = []
for dim in reversed(shape):
result.append(index % dim)
            index = index // dim  # floor division; plain '/' yields floats on Python 3
return result[::-1]
result = []
for i, value in enumerate(data.flatten()):
index = indexOf(data.shape, i)
selected = True
if len(index) == len(offsets):
for j, offset in enumerate(offsets):
selected = selected and index[j] >= offset and index[
j] < crop_shape[j] + offset
if selected:
result.append(value)
return np.array(result).reshape(crop_shape)
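# Illustrative usage of the helper above (values are hypothetical, chosen
# only to show the windowing behaviour):
#
#   data = np.arange(9).reshape(3, 3)        # [[0 1 2], [3 4 5], [6 7 8]]
#   crop(data, [1, 1], (2, 2))
#   # -> [[4 5], [7 8]]  (the window starting at row 1, column 1)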
class TestCropOp(OpTest):
def setUp(self):
self.op_type = "crop"
self.crop_by_input = False
self.offset_by_input = False
self.attrs = {}
self.initTestCase()
if self.crop_by_input:
self.inputs = {
'X': np.random.random(self.x_shape).astype("float32"),
'Y': np.random.random(self.crop_shape).astype("float32")
}
else:
self.attrs['shape'] = self.crop_shape
self.inputs = {
'X': np.random.random(self.x_shape).astype("float32"),
}
if self.offset_by_input:
self.inputs['Offsets'] = np.array(self.offsets).astype('int32')
else:
self.attrs['offsets'] = self.offsets
self.outputs = {
'Out': crop(self.inputs['X'], self.offsets, self.crop_shape)
}
def initTestCase(self):
self.x_shape = (8, 8)
self.crop_shape = (2, 2)
self.offsets = [1, 2]
<|fim▁hole|> self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X'], 'Out', max_relative_error=0.006)
class TestCase1(TestCropOp):
def initTestCase(self):
self.x_shape = (16, 8, 32)
self.crop_shape = [2, 2, 3]
self.offsets = [1, 5, 3]
class TestCase2(TestCropOp):
def initTestCase(self):
self.x_shape = (4, 8)
self.crop_shape = [4, 8]
self.offsets = [0, 0]
class TestCase3(TestCropOp):
def initTestCase(self):
self.x_shape = (4, 8, 16)
self.crop_shape = [2, 2, 3]
self.offsets = [1, 5, 3]
self.crop_by_input = True
class TestCase4(TestCropOp):
def initTestCase(self):
self.x_shape = (4, 4)
self.crop_shape = [4, 4]
self.offsets = [0, 0]
self.crop_by_input = True
class TestCase5(TestCropOp):
def initTestCase(self):
self.x_shape = (3, 4, 5)
self.crop_shape = [2, 2, 3]
self.offsets = [1, 0, 2]
self.offset_by_input = True
class TestCase6(TestCropOp):
def initTestCase(self):
self.x_shape = (10, 9, 14)
self.crop_shape = [3, 3, 5]
self.offsets = [3, 5, 4]
self.crop_by_input = True
self.offset_by_input = True
if __name__ == '__main__':
unittest.main()<|fim▁end|> | def test_check_output(self): |
<|file_name|>0084_auto_20180215_0747.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-02-15 07:47
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0083_auto_20180209_1210'),
]
<|fim▁hole|> operations = [
migrations.RenameModel(
old_name='Facilitator',
new_name='Profile',
),
]<|fim▁end|> | |
<|file_name|>ui_more.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2006-2013, Alexis Royer, http://alexis.royer.free.fr/CLI
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the CLI library project nor the names of its contributors may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "cli/pch.h"
#include "cli/assert.h"
#include "cli/ui_more.h"
#include "cli/shell.h"
#include "cli/string_device.h"
#include "ui_text.h"
#include "command_line_edition.h"
CLI_NS_BEGIN(cli)
CLI_NS_BEGIN(ui)
More::More(const unsigned int UI_MaxLines, const unsigned int UI_MaxLineLength)
: UI(),
m_uiText(* new Text(UI_MaxLines, UI_MaxLineLength)), m_puiTextIt(NULL),
m_cliMoreLine(* new CmdLineEdition())
{
}
More::More(ExecutionContext& CLI_ParentContext, const unsigned int UI_MaxLines, const unsigned int UI_MaxLineLength)
: UI(CLI_ParentContext),
m_uiText(* new Text(UI_MaxLines, UI_MaxLineLength)), m_puiTextIt(NULL),
m_cliMoreLine(* new CmdLineEdition())
{
}
More::~More(void)
{
delete & m_uiText;
if (m_puiTextIt != NULL)
{
delete m_puiTextIt;
m_puiTextIt = NULL;
}
delete & m_cliMoreLine;
}
const OutputDevice& More::GetText(void)
{
return m_uiText;
}
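// Informal usage note (not from the original source): callers are expected
// to stream the text to page into GetText() before running the UI; More
// then replays that buffered text one screenful at a time, pausing on the
// "--- More ---" prompt handled in OnKey() below.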
void More::Reset(void)
{
<|fim▁hole|> }
void More::ResetToDefault(void)
{
// Very first display.
const OutputDevice::ScreenInfo cli_ScreenInfo = GetStream(OUTPUT_STREAM).GetScreenInfo();
if (m_puiTextIt == NULL)
{
m_puiTextIt = new TextIterator(cli_ScreenInfo, cli_ScreenInfo.GetSafeHeight() - 1);
}
CLI_ASSERT(m_puiTextIt != NULL);
if (m_puiTextIt != NULL)
{
m_uiText.Begin(*m_puiTextIt);
const StringDevice cli_Out(cli_ScreenInfo.GetSafeHeight() * (cli_ScreenInfo.GetSafeWidth() + 1), false);
m_uiText.PrintPage(*m_puiTextIt, cli_Out, false);
GetStream(OUTPUT_STREAM) << cli_Out.GetString();
}
ShowMoreMessage();
}
void More::OnKey(const KEY E_KeyCode)
{
// Ensure m_puiTextIt is valid.
CLI_ASSERT(m_puiTextIt != NULL);
if (m_puiTextIt == NULL) { Quit(); }
else {
switch (E_KeyCode)
{
case KEY_END:
HideMoreMessage();
// We are already at the bottom of a page, no need to optimize display so far, let it progress fluently.
// Optimize line by line at least.
for (bool b_LineDown = true; b_LineDown; )
{
const OutputDevice::ScreenInfo cli_ScreenInfo = GetStream(OUTPUT_STREAM).GetScreenInfo();
const StringDevice cli_Out(cli_ScreenInfo.GetSafeHeight() * (cli_ScreenInfo.GetSafeWidth() + 1), false);
b_LineDown = m_uiText.LineDown(*m_puiTextIt, & cli_Out);
GetStream(OUTPUT_STREAM) << cli_Out.GetString();
}
ShowMoreMessage();
break;
case PAGE_DOWN:
case SPACE:
// Print one more page
HideMoreMessage();
do {
// Output lines to a buffer output device.
const OutputDevice::ScreenInfo cli_ScreenInfo = GetStream(OUTPUT_STREAM).GetScreenInfo();
const StringDevice cli_Out(cli_ScreenInfo.GetSafeHeight() * (cli_ScreenInfo.GetSafeWidth() + 1), false);
m_uiText.PageDown(*m_puiTextIt, & cli_Out);
// Display in one call in order to optimize display.
GetStream(OUTPUT_STREAM) << cli_Out.GetString();
} while(0);
ShowMoreMessage();
break;
case KEY_DOWN:
case ENTER:
// Print one more line
HideMoreMessage();
m_uiText.LineDown(*m_puiTextIt, & GetStream(OUTPUT_STREAM));
ShowMoreMessage();
break;
case KEY_q:
case KEY_Q:
case ESCAPE:
case BREAK:
case LOGOUT:
case NULL_KEY:
// Stop display
Quit();
break;
default:
            // Unhandled character: beep.
Beep();
break;
}
}
}
void More::ShowMoreMessage(void)
{
CLI_ASSERT(m_puiTextIt != NULL);
if (m_puiTextIt != NULL)
{
m_cliMoreLine.Reset();
TextIterator tmp(*m_puiTextIt);
if (m_uiText.LineDown(tmp, NULL))
{
// Still lines to display.
const ResourceString cli_MoreMessage = ResourceString()
.SetString(ResourceString::LANG_EN, "--- More ---")
.SetString(ResourceString::LANG_FR, "--- Plus ---");
m_cliMoreLine.Put(GetStream(OUTPUT_STREAM), cli_MoreMessage.GetString(GetLang()));
return;
}
}
    // If the input text is done (or something wrong occurred), terminate the UI execution.
Quit();
}
void More::HideMoreMessage(void)
{
m_cliMoreLine.CleanAll(GetStream(OUTPUT_STREAM));
}
void More::Quit(void)
{
HideMoreMessage();
EndControl(true);
}
CLI_NS_END(ui)
CLI_NS_END(cli)<|fim▁end|> | // Nothing to do.
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>__author__ = 'michogarcia'
from setuptools import setup, find_packages
version = '0.1'
setup(name='FullDiskAlert',
version=version,
author="Micho Garcia",
author_email="[email protected]",
license="LICENSE.txt",
description="Sends mail when disk is above threshold",
packages=find_packages(),
install_requires=[
'pyyaml',<|fim▁hole|> ],
)<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate piston; // piston core
extern crate graphics; // piston graphics
extern crate glutin_window; // opengl context creation
extern crate opengl_graphics; // opengl binding
//extern crate find_folder; // for finding our assets folder.
extern crate time;
extern crate rand;
extern crate ncollide; // 2d/3d/nd collision detection stuff<|fim▁hole|>
extern crate rustc_serialize; // for ai::nn
// for json
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
use piston::window::WindowSettings;
use piston::event_loop::*; // Generic eventloop
use piston::input::*;
use glutin_window::GlutinWindow as Window;
use opengl_graphics::{GlGraphics, OpenGL};
use nalgebra::{Vector2};
mod snake;
mod state;
mod input;
mod ai;
mod food;
mod geometry;
pub struct App {
gl: GlGraphics, // OpenGL drawing backend.
world_state: state::WorldState,
should_render: bool,
window_rect: Vector2<u32>,
}
impl App {
fn render(&mut self, args: &RenderArgs) {
if self.should_render {
self.window_rect = Vector2::new(args.width, args.height);
use graphics::*;
const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
let world_state = &self.world_state;
let viewport = args.viewport();
self.gl
.draw(viewport, |c, gl| {
// Clear the screen.
clear(BLACK, gl);
for snake in &world_state.snakes {
snake.render(&c, gl, &args);
}
});
}
}
fn update(&mut self, args: &UpdateArgs) {
self.world_state.window_rect = self.window_rect;
self.world_state.update(args);
}
}
fn main() {
// Change this to OpenGL::V2_1 if not working.
let opengl = OpenGL::V3_2;
let (width, height) = (1280, 720);
// Create an Glutin window.
let mut window: Window = WindowSettings::new("Snake Game", [width, height])
.opengl(opengl)
.exit_on_esc(true)
.vsync(true)
.fullscreen(false)
.build()
.unwrap();
// Create a new game and run it.
let mut app: App = App {
gl: GlGraphics::new(opengl),
world_state: state::WorldState::default(),
should_render: true,
window_rect: Vector2::new(width, height),
};
// You can change these
app.world_state.speed = 20.0;
app.world_state.snake_length = 3;
for _ in 0..4 { // Try increasing this a lot and remove printlns.
let snake = snake::Snake::new(geometry::random_point_within(app.window_rect), 3, 20.0);
app.world_state.snakes.push(snake);
}
// Add 10 snakes. with default length 2 and width 10
//default: .max_fps(60).ups(120)
let mut events = Events::new(EventSettings::new()).max_fps(60).ups(120);
while let Some(e) = events.next(&mut window) {
if let Some(r) = e.render_args() {
app.render(&r);
}
if let Some(u) = e.update_args() {
// Simulate lag:
// std::thread::sleep(std::time::Duration::new(0, 1000_000_00));
app.update(&u);
}
}
}<|fim▁end|> | extern crate nalgebra; // has some neat matrices, vectors and points |
<|file_name|>RemoveBtn.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react'
import ActionDelete from 'material-ui/svg-icons/action/delete'
import { colors } from '/styles'
import moduleStyles from '/styles/fileTree'
const RemoveBtn = ({ onClick }) => (
<ActionDelete
onClick={onClick}
style={moduleStyles.listIcon.base}
color={colors.light}
hoverColor={colors.hover.red} />
)<|fim▁hole|><|fim▁end|> |
export default RemoveBtn |
<|file_name|>15.2.3.13-2-5.js<|end_file_name|><|fim▁begin|><|fim▁hole|>/*---
es5id: 15.2.3.13-2-5
description: Object.isExtensible returns true for all built-in objects (String)
---*/
var e = Object.isExtensible(String);
assert.sameValue(e, true, 'e');<|fim▁end|> | // Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
|
<|file_name|>RssLED.py<|end_file_name|><|fim▁begin|>import feedparser
import time
# Create display instance on default I2C address (0x70) and bus number.
from Adafruit_LED_Backpack import AlphaNum4
display = AlphaNum4.AlphaNum4()
# Initialize the display. Must be called once before using the display.
display.begin()
#create string(s) with rss address for multiple feeds
RssAddress = "http://feeds.reuters.com/Reuters/domesticNews"
#create feed caled Rss
Rss = feedparser.parse(RssAddress)
#Loop to iterate through all titles in feed sleeping for 1 second between printing
display.clear()
display.write_display()
#Loop through each title of feed
for i in Rss.entries:
#prints title to console
print (i.title)
    #reset position to beginning
pos = 0
    #Convert the title to uppercase for readability and add a "---*" buffer at the beginning and end to distinguish titles
CapString = "---*" + i.title.upper() + "*---"
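    # Illustration of the sliding 4-character window used below (example
    # title only): for CapString == "---*NEWS*---" the successive frames
    # are "---*", "--*N", "-*NE", "*NEW", "NEWS", ...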
# Dashed line in console for aesthetics
print("----------------------------------------------------------------")
#Loop for scrolling through title
for x in range(0,len(CapString)-4):
# Print a 4 character string to the display buffer.
display.print_str(CapString[pos:pos+4])
# Write the display buffer to the hardware. This must be called to
# update the actual display LEDs.
display.write_display()
# Increment position. Wrap back to 0 when the end is reached.
pos += 1
if pos > len(CapString)-4:
pos = 0
# Delay for 0.15 of a second. This can be changed to speed up or slow down the scroll.
time.sleep(0.15)
# Clear out display
display.print_str(" ")<|fim▁hole|><|fim▁end|> | display.write_display() |
<|file_name|>ipam_non_pluggable_backend.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_db import exception as db_exc
from oslo_log import log as logging
from sqlalchemy import and_
from sqlalchemy import orm
from sqlalchemy.orm import exc
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import ipv6_utils
from neutron.db import ipam_backend_mixin
from neutron.db import models_v2
from neutron.ipam import requests as ipam_req
from neutron.ipam import subnet_alloc
LOG = logging.getLogger(__name__)
class IpamNonPluggableBackend(ipam_backend_mixin.IpamBackendMixin):
@staticmethod
def _generate_ip(context, subnets):
try:
return IpamNonPluggableBackend._try_generate_ip(context, subnets)
except n_exc.IpAddressGenerationFailure:
IpamNonPluggableBackend._rebuild_availability_ranges(context,
subnets)
return IpamNonPluggableBackend._try_generate_ip(context, subnets)
@staticmethod
def _try_generate_ip(context, subnets):
"""Generate an IP address.
The IP address will be generated from one of the subnets defined on
the network.
"""
range_qry = context.session.query(
models_v2.IPAvailabilityRange).join(
models_v2.IPAllocationPool).with_lockmode('update')
for subnet in subnets:
ip_range = range_qry.filter_by(subnet_id=subnet['id']).first()
if not ip_range:
LOG.debug("All IPs from subnet %(subnet_id)s (%(cidr)s) "
"allocated",
{'subnet_id': subnet['id'],
'cidr': subnet['cidr']})
continue
ip_address = ip_range['first_ip']
if ip_range['first_ip'] == ip_range['last_ip']:
# No more free indices on subnet => delete
LOG.debug("No more free IP's in slice. Deleting "
"allocation pool.")
context.session.delete(ip_range)
else:
# increment the first free
new_first_ip = str(netaddr.IPAddress(ip_address) + 1)
ip_range['first_ip'] = new_first_ip
LOG.debug("Allocated IP - %(ip_address)s from %(first_ip)s "
"to %(last_ip)s",
{'ip_address': ip_address,
'first_ip': ip_address,
'last_ip': ip_range['last_ip']})
return {'ip_address': ip_address,
'subnet_id': subnet['id']}
raise n_exc.IpAddressGenerationFailure(net_id=subnets[0]['network_id'])
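        # Behaviour sketch with made-up values: from an availability range
        # 10.0.0.2-10.0.0.254 the loop above hands out 10.0.0.2 and shrinks
        # the range to 10.0.0.3-10.0.0.254; when a range holds a single
        # address, that address is handed out and the row is deleted.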
@staticmethod
def _rebuild_availability_ranges(context, subnets):
"""Rebuild availability ranges.
This method is called only when there's no more IP available or by
_update_subnet_allocation_pools. Calling
_update_subnet_allocation_pools before calling this function deletes
the IPAllocationPools associated with the subnet that is updating,
which will result in deleting the IPAvailabilityRange too.
"""
ip_qry = context.session.query(
models_v2.IPAllocation).with_lockmode('update')
# PostgreSQL does not support select...for update with an outer join.
# No join is needed here.
pool_qry = context.session.query(
models_v2.IPAllocationPool).options(
orm.noload('available_ranges')).with_lockmode('update')
for subnet in sorted(subnets):
LOG.debug("Rebuilding availability ranges for subnet %s",
subnet)
# Create a set of all currently allocated addresses
ip_qry_results = ip_qry.filter_by(subnet_id=subnet['id'])
allocations = netaddr.IPSet([netaddr.IPAddress(i['ip_address'])
for i in ip_qry_results])
for pool in pool_qry.filter_by(subnet_id=subnet['id']):
# Create a set of all addresses in the pool
poolset = netaddr.IPSet(netaddr.IPRange(pool['first_ip'],
pool['last_ip']))
# Use set difference to find free addresses in the pool
available = poolset - allocations
# Generator compacts an ip set into contiguous ranges
def ipset_to_ranges(ipset):
first, last = None, None
for cidr in ipset.iter_cidrs():
if last and last + 1 != cidr.first:
yield netaddr.IPRange(first, last)
first = None
first, last = first if first else cidr.first, cidr.last
if first:
yield netaddr.IPRange(first, last)
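                # Hypothetical walk-through (addresses invented for
                # illustration): with pool 10.0.0.2-10.0.0.10 and 10.0.0.5,
                # 10.0.0.6 allocated, ``available.iter_cidrs()`` yields the
                # CIDRs covering 10.0.0.2-10.0.0.4 and 10.0.0.7-10.0.0.10,
                # which the generator merges back into exactly those two
                # contiguous IPRange objects.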
# Write the ranges to the db
for ip_range in ipset_to_ranges(available):
available_range = models_v2.IPAvailabilityRange(
allocation_pool_id=pool['id'],
first_ip=str(netaddr.IPAddress(ip_range.first)),
last_ip=str(netaddr.IPAddress(ip_range.last)))
context.session.add(available_range)
@staticmethod
def _allocate_specific_ip(context, subnet_id, ip_address):
"""Allocate a specific IP address on the subnet."""
ip = int(netaddr.IPAddress(ip_address))
range_qry = context.session.query(
models_v2.IPAvailabilityRange).join(
models_v2.IPAllocationPool).with_lockmode('update')
results = range_qry.filter_by(subnet_id=subnet_id)
for ip_range in results:
first = int(netaddr.IPAddress(ip_range['first_ip']))
last = int(netaddr.IPAddress(ip_range['last_ip']))
if first <= ip <= last:
if first == last:
context.session.delete(ip_range)
return
elif first == ip:
new_first_ip = str(netaddr.IPAddress(ip_address) + 1)
ip_range['first_ip'] = new_first_ip
return
elif last == ip:
new_last_ip = str(netaddr.IPAddress(ip_address) - 1)
ip_range['last_ip'] = new_last_ip
return
else:
# Adjust the original range to end before ip_address
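                    # e.g. allocating 10.0.0.5 from the range 10.0.0.2-10.0.0.9
                    # leaves two ranges: 10.0.0.2-10.0.0.4 and 10.0.0.6-10.0.0.9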
old_last_ip = ip_range['last_ip']
new_last_ip = str(netaddr.IPAddress(ip_address) - 1)
ip_range['last_ip'] = new_last_ip
# Create a new second range for after ip_address
new_first_ip = str(netaddr.IPAddress(ip_address) + 1)
new_ip_range = models_v2.IPAvailabilityRange(
allocation_pool_id=ip_range['allocation_pool_id'],
first_ip=new_first_ip,
last_ip=old_last_ip)
context.session.add(new_ip_range)
return
@staticmethod
def _check_unique_ip(context, network_id, subnet_id, ip_address):
"""Validate that the IP address on the subnet is not in use."""
ip_qry = context.session.query(models_v2.IPAllocation)
try:
ip_qry.filter_by(network_id=network_id,
subnet_id=subnet_id,
ip_address=ip_address).one()
except exc.NoResultFound:
return True
return False
def save_allocation_pools(self, context, subnet, allocation_pools):
for pool in allocation_pools:
first_ip = str(netaddr.IPAddress(pool.first, pool.version))
last_ip = str(netaddr.IPAddress(pool.last, pool.version))
ip_pool = models_v2.IPAllocationPool(subnet=subnet,
first_ip=first_ip,
last_ip=last_ip)
context.session.add(ip_pool)
ip_range = models_v2.IPAvailabilityRange(
ipallocationpool=ip_pool,
first_ip=first_ip,
last_ip=last_ip)
context.session.add(ip_range)
def allocate_ips_for_port_and_store(self, context, port, port_id):
network_id = port['port']['network_id']
ips = self._allocate_ips_for_port(context, port)
if ips:
for ip in ips:
ip_address = ip['ip_address']
subnet_id = ip['subnet_id']
self._store_ip_allocation(context, ip_address, network_id,
subnet_id, port_id)
def update_port_with_ips(self, context, db_port, new_port, new_mac):
changes = self.Changes(add=[], original=[], remove=[])
# Check if the IPs need to be updated
network_id = db_port['network_id']
if 'fixed_ips' in new_port:
original = self._make_port_dict(db_port, process_extensions=False)
changes = self._update_ips_for_port(
context, network_id,
original["fixed_ips"], new_port['fixed_ips'],
original['mac_address'], db_port['device_owner'])
# Update ips if necessary
for ip in changes.add:
IpamNonPluggableBackend._store_ip_allocation(
context, ip['ip_address'], network_id,
ip['subnet_id'], db_port.id)
self._update_db_port(context, db_port, new_port, network_id, new_mac)
return changes
def _test_fixed_ips_for_port(self, context, network_id, fixed_ips,
device_owner):
"""Test fixed IPs for port.
Check that configured subnets are valid prior to allocating any
IPs. Include the subnet_id in the result if only an IP address is
configured.
:raises: InvalidInput, IpAddressInUse, InvalidIpForNetwork,
InvalidIpForSubnet
"""
fixed_ip_set = []
for fixed in fixed_ips:
subnet = self._get_subnet_for_fixed_ip(context, fixed, network_id)
is_auto_addr_subnet = ipv6_utils.is_auto_address_subnet(subnet)
if 'ip_address' in fixed:
                # Ensure that the IPs are unique
if not IpamNonPluggableBackend._check_unique_ip(
context, network_id,
subnet['id'], fixed['ip_address']):
raise n_exc.IpAddressInUse(net_id=network_id,
ip_address=fixed['ip_address'])
if (is_auto_addr_subnet and
device_owner not in
constants.ROUTER_INTERFACE_OWNERS):
msg = (_("IPv6 address %(address)s can not be directly "
"assigned to a port on subnet %(id)s since the "
"subnet is configured for automatic addresses") %
{'address': fixed['ip_address'],
'id': subnet['id']})
raise n_exc.InvalidInput(error_message=msg)
fixed_ip_set.append({'subnet_id': subnet['id'],
'ip_address': fixed['ip_address']})
else:
# A scan for auto-address subnets on the network is done
# separately so that all such subnets (not just those
# listed explicitly here by subnet ID) are associated
# with the port.
if (device_owner in constants.ROUTER_INTERFACE_OWNERS_SNAT or
not is_auto_addr_subnet):
fixed_ip_set.append({'subnet_id': subnet['id']})
self._validate_max_ips_per_port(fixed_ip_set)
return fixed_ip_set
def _allocate_fixed_ips(self, context, fixed_ips, mac_address):
"""Allocate IP addresses according to the configured fixed_ips."""
ips = []
        # Process entries that request a specific IP first, in case those IPs
        # would otherwise be the next ones handed out to entries that did not
        # request a specific IP.
fixed_ips.sort(key=lambda x: 'ip_address' not in x)
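        # sort() is stable and False < True, so entries whose key is False
        # (those carrying an 'ip_address') are processed before subnet-only
        # entries, e.g. [{'subnet_id': s}, {'ip_address': a, 'subnet_id': s}]
        # handles the specific address first.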
for fixed in fixed_ips:
subnet = self._get_subnet(context, fixed['subnet_id'])
is_auto_addr = ipv6_utils.is_auto_address_subnet(subnet)
if 'ip_address' in fixed:
if not is_auto_addr:
# Remove the IP address from the allocation pool
IpamNonPluggableBackend._allocate_specific_ip(
context, fixed['subnet_id'], fixed['ip_address'])
ips.append({'ip_address': fixed['ip_address'],
'subnet_id': fixed['subnet_id']})
# Only subnet ID is specified => need to generate IP
# from subnet
else:
if is_auto_addr:
ip_address = self._calculate_ipv6_eui64_addr(context,
subnet,
mac_address)
ips.append({'ip_address': ip_address.format(),
'subnet_id': subnet['id']})
else:
subnets = [subnet]
# IP address allocation
result = self._generate_ip(context, subnets)
ips.append({'ip_address': result['ip_address'],
'subnet_id': result['subnet_id']})
return ips
def _update_ips_for_port(self, context, network_id, original_ips,
new_ips, mac_address, device_owner):
"""Add or remove IPs from the port."""
added = []
changes = self._get_changed_ips_for_port(context, original_ips,
new_ips, device_owner)
        # Check if the IPs to add are OK
to_add = self._test_fixed_ips_for_port(context, network_id,
changes.add, device_owner)
for ip in changes.remove:
LOG.debug("Port update. Hold %s", ip)
IpamNonPluggableBackend._delete_ip_allocation(context,
network_id,
ip['subnet_id'],
ip['ip_address'])
if to_add:
LOG.debug("Port update. Adding %s", to_add)
added = self._allocate_fixed_ips(context, to_add, mac_address)
return self.Changes(add=added,
original=changes.original,
remove=changes.remove)
def _allocate_ips_for_port(self, context, port):
"""Allocate IP addresses for the port.
If port['fixed_ips'] is set to 'ATTR_NOT_SPECIFIED', allocate IP
addresses for the port. If port['fixed_ips'] contains an IP address or
a subnet_id then allocate an IP address accordingly.
"""<|fim▁hole|> ips = []
v6_stateless = []
net_id_filter = {'network_id': [p['network_id']]}
subnets = self._get_subnets(context, filters=net_id_filter)
is_router_port = (
p['device_owner'] in constants.ROUTER_INTERFACE_OWNERS_SNAT)
fixed_configured = p['fixed_ips'] is not attributes.ATTR_NOT_SPECIFIED
if fixed_configured:
configured_ips = self._test_fixed_ips_for_port(context,
p["network_id"],
p['fixed_ips'],
p['device_owner'])
ips = self._allocate_fixed_ips(context,
configured_ips,
p['mac_address'])
# For ports that are not router ports, implicitly include all
# auto-address subnets for address association.
if not is_router_port:
v6_stateless += [subnet for subnet in subnets
if ipv6_utils.is_auto_address_subnet(subnet)]
else:
# Split into v4, v6 stateless and v6 stateful subnets
v4 = []
v6_stateful = []
for subnet in subnets:
if subnet['ip_version'] == 4:
v4.append(subnet)
elif ipv6_utils.is_auto_address_subnet(subnet):
if not is_router_port:
v6_stateless.append(subnet)
else:
v6_stateful.append(subnet)
version_subnets = [v4, v6_stateful]
for subnets in version_subnets:
if subnets:
result = IpamNonPluggableBackend._generate_ip(context,
subnets)
ips.append({'ip_address': result['ip_address'],
'subnet_id': result['subnet_id']})
for subnet in v6_stateless:
# IP addresses for IPv6 SLAAC and DHCPv6-stateless subnets
# are implicitly included.
ip_address = self._calculate_ipv6_eui64_addr(context, subnet,
p['mac_address'])
ips.append({'ip_address': ip_address.format(),
'subnet_id': subnet['id']})
return ips
def add_auto_addrs_on_network_ports(self, context, subnet, ipam_subnet):
"""For an auto-address subnet, add addrs for ports on the net."""
with context.session.begin(subtransactions=True):
network_id = subnet['network_id']
port_qry = context.session.query(models_v2.Port)
ports = port_qry.filter(
and_(models_v2.Port.network_id == network_id,
~models_v2.Port.device_owner.in_(
constants.ROUTER_INTERFACE_OWNERS_SNAT)))
for port in ports:
ip_address = self._calculate_ipv6_eui64_addr(
context, subnet, port['mac_address'])
allocated = models_v2.IPAllocation(network_id=network_id,
port_id=port['id'],
ip_address=ip_address,
subnet_id=subnet['id'])
try:
# Do the insertion of each IP allocation entry within
# the context of a nested transaction, so that the entry
# is rolled back independently of other entries whenever
# the corresponding port has been deleted.
with context.session.begin_nested():
context.session.add(allocated)
except db_exc.DBReferenceError:
LOG.debug("Port %s was deleted while updating it with an "
"IPv6 auto-address. Ignoring.", port['id'])
def _calculate_ipv6_eui64_addr(self, context, subnet, mac_addr):
prefix = subnet['cidr']
network_id = subnet['network_id']
ip_address = ipv6_utils.get_ipv6_addr_by_EUI64(
prefix, mac_addr).format()
if not self._check_unique_ip(context, network_id,
subnet['id'], ip_address):
raise n_exc.IpAddressInUse(net_id=network_id,
ip_address=ip_address)
return ip_address
def allocate_subnet(self, context, network, subnet, subnetpool_id):
subnetpool = None
if subnetpool_id:
subnetpool = self._get_subnetpool(context, subnetpool_id)
self._validate_ip_version_with_subnetpool(subnet, subnetpool)
# gateway_ip and allocation pools should be validated or generated
# only for specific request
if subnet['cidr'] is not attributes.ATTR_NOT_SPECIFIED:
subnet['gateway_ip'] = self._gateway_ip_str(subnet,
subnet['cidr'])
# allocation_pools are converted to list of IPRanges
subnet['allocation_pools'] = self._prepare_allocation_pools(
subnet['allocation_pools'],
subnet['cidr'],
subnet['gateway_ip'])
subnet_request = ipam_req.SubnetRequestFactory.get_request(context,
subnet,
subnetpool)
if subnetpool_id:
driver = subnet_alloc.SubnetAllocator(subnetpool, context)
ipam_subnet = driver.allocate_subnet(subnet_request)
subnet_request = ipam_subnet.get_details()
subnet = self._save_subnet(context,
network,
self._make_subnet_args(
subnet_request,
subnet,
subnetpool_id),
subnet['dns_nameservers'],
subnet['host_routes'],
subnet_request)
# ipam_subnet is not expected to be allocated for non pluggable ipam,
# so just return None for it (second element in returned tuple)
return subnet, None<|fim▁end|> | p = port['port'] |
<|file_name|>dbw_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import csv
import rospy
from std_msgs.msg import Bool
from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd, SteeringReport
'''
You can use this file to test your DBW code against a bag recorded with a reference implementation.
The bag can be found at https://s3-us-west-1.amazonaws.com/udacity-selfdrivingcar/files/reference.bag.zip
To use the downloaded bag file, rename it to 'dbw_test.rosbag.bag' and place it in the CarND-Capstone/data folder.
Then, with roscore running, use roslaunch with the dbw_test.launch file found in
<project_repo>/ros/src/twist_controller/launch.
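(For example, assuming the launch file lives in a package named twist_controller:
`roslaunch twist_controller dbw_test.launch`.)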
This file will produce three CSV files which you can process to figure out how
your DBW node is performing on various commands.
`/actual/*` are commands from the recorded bag while `/vehicle/*` are the output of your node.
'''
class DBWTestNode(object):
def __init__(self):
rospy.init_node('dbw_test_node')
rospy.Subscriber('/vehicle/steering_cmd', SteeringCmd, self.steer_cb)
rospy.Subscriber('/vehicle/throttle_cmd', ThrottleCmd, self.throttle_cb)
rospy.Subscriber('/vehicle/brake_cmd', BrakeCmd, self.brake_cb)
rospy.Subscriber('/actual/steering_cmd', SteeringCmd, self.actual_steer_cb)
rospy.Subscriber('/actual/throttle_cmd', ThrottleCmd, self.actual_throttle_cb)
rospy.Subscriber('/actual/brake_cmd', BrakeCmd, self.actual_brake_cb)
rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb)
self.steer = self.throttle = self.brake = None
self.steer_data = []
self.throttle_data = []
self.brake_data = []
self.dbw_enabled = False
base_path = os.path.dirname(os.path.abspath(__file__))
self.steerfile = os.path.join(base_path, 'steers.csv')
self.throttlefile = os.path.join(base_path, 'throttles.csv')
self.brakefile = os.path.join(base_path, 'brakes.csv')
self.loop()
def loop(self):
rate = rospy.Rate(10) # 10Hz
while not rospy.is_shutdown():
rate.sleep()
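        # rospy has shut down; dump everything collected so far to CSV for
        # offline comparison of the recorded commands vs. this node's output.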
fieldnames = ['actual', 'proposed']
with open(self.steerfile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()<|fim▁hole|> writer.writeheader()
writer.writerows(self.throttle_data)
with open(self.brakefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(self.brake_data)
def dbw_enabled_cb(self, msg):
self.dbw_enabled = msg.data
def steer_cb(self, msg):
self.steer = msg.steering_wheel_angle_cmd
def throttle_cb(self, msg):
self.throttle = msg.pedal_cmd
def brake_cb(self, msg):
self.brake = msg.pedal_cmd
def actual_steer_cb(self, msg):
if self.dbw_enabled and self.steer is not None:
self.steer_data.append({'actual': msg.steering_wheel_angle_cmd,
'proposed': self.steer})
self.steer = None
def actual_throttle_cb(self, msg):
if self.dbw_enabled and self.throttle is not None:
self.throttle_data.append({'actual': msg.pedal_cmd,
'proposed': self.throttle})
self.throttle = None
def actual_brake_cb(self, msg):
if self.dbw_enabled and self.brake is not None:
self.brake_data.append({'actual': msg.pedal_cmd,
'proposed': self.brake})
self.brake = None
if __name__ == '__main__':
DBWTestNode()<|fim▁end|> | writer.writerows(self.steer_data)
with open(self.throttlefile, 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=fieldnames) |
<|file_name|>bitcoin_es_CL.ts<|end_file_name|><|fim▁begin|><TS language="es_CL" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Haga clic para editar la dirección o etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Crea una nueva dirección</translation>
</message>
<message>
<source>&New</source>
        <translation>&Nueva</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles</translation>
</message>
<message>
<source>&Copy</source>
        <translation>&Copiar</translation>
</message>
<message>
<source>C&lose</source>
        <translation>C&errar</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Eliminar la dirección seleccionada de la lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<source>&Export</source>
        <translation>&Exportar</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Enter passphrase</source>
        <translation>Introduce la contraseña</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repite nueva contraseña</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Firmar &Mensaje...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Vista general</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Muestra una vista general de la billetera</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transacciones</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Explora el historial de transacciónes</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Salir</translation>
</message>
<message>
<source>Quit application</source>
<translation>Salir del programa</translation>
</message>
<message>
<source>&About %1</source>
<translation>S&obre %1</translation>
</message>
<message>
<source>About &Qt</source>
        <translation>Acerca de &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Mostrar Información sobre Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opciones</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Codificar la billetera...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Respaldar billetera...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Cambiar la contraseña...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>Mandando direcciones</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Recibiendo direcciones</translation>
</message>
<message>
<source>Open &URI...</source>
        <translation>Abrir &URI...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
        <translation>Reindexando bloques en el disco...</translation>
</message>
<message>
<source>Send coins to a Chancoin address</source>
<translation>Enviar monedas a una dirección chancoin</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Respaldar billetera en otra ubicación</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambiar la contraseña utilizada para la codificación de la billetera</translation>
</message>
<message>
<source>&Debug window</source>
<translation>Ventana &Debug</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Abre consola de depuración y diagnóstico</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>Verificar mensaje....</translation>
</message>
<message>
<source>Chancoin</source>
<translation>Chancoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<source>&Send</source>
<translation>&Envía</translation>
</message>
<message>
<source>&Receive</source>
        <translation>&Recibir</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Mostrar/Ocultar</translation>
</message>
<message>
<source>Sign messages with your Chancoin addresses to prove you own them</source>
        <translation>Firme mensajes con sus direcciones Chancoin para probar que es su dueño</translation>
</message>
<message>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Configuración</translation>
</message>
<message>
<source>&Help</source>
<translation>&Ayuda</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Barra de pestañas</translation>
</message>
<message>
<source>Request payments (generates QR codes and chancoin: URIs)</source>
        <translation>Solicitar pagos (genera códigos QR y URIs de chancoin:)</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Warning</source>
<translation>Atención</translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Up to date</source>
<translation>Actualizado</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Recuperando...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Transacción enviada</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>desbloqueda</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>bloqueda</b></translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Fee:</source>
        <translation>Comisión:</translation>
</message>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Confirmaciones</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<source>&Address</source>
<translation>&Dirección</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>name</source>
<translation>Nombre</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versión</translation>
</message>
<message>
<source>Command-line options</source>
<translation>opciones de linea de comando</translation>
</message>
<message>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>bienvenido</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
<message>
<source>Form</source>
<translation>Formulario</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>URI:</source>
<translation>url:</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Reestablece todas las opciones.</translation>
</message>
<message>
<source>&Network</source>
<translation>&Red</translation>
</message>
<message>
<source>W&allet</source>
<translation>Cartera</translation>
</message>
<message>
<source>Expert</source>
<translation>experto</translation>
</message>
<message>
<source>Automatically open the Chancoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abre automáticamente el puerto del cliente Chancoin en el router. Esto funciona solo cuando tu router es compatible con UPnP y está habilitado.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Direcciona el puerto usando &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>&IP Proxy:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Puerto:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Puerto del servidor proxy (ej. 9050)</translation>
</message>
<message>
<source>&Window</source>
        <translation>&Ventana</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Muestra solo un ícono en la bandeja después de minimizar la ventana</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiza a la bandeja en vez de la barra de tareas</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimiza a la bandeja al cerrar</translation>
</message>
<message>
<source>&Display</source>
<translation>&Mostrado</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unidad en la que mostrar cantitades:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Elige la subdivisión por defecto para mostrar cantidaded en la interfaz cuando se envien monedas</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Cancela</translation>
</message>
<message>
<source>default</source>
<translation>predeterminado</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Confirmar reestablecimiento de las opciones</translation>
</message>
</context>
<context><|fim▁hole|> <message>
<source>Form</source>
<translation>Formulario</translation>
</message>
<message>
<source>Total:</source>
<translation>Total:</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 y %2</translation>
</message>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>Client version</source>
<translation>Versión del Cliente</translation>
</message>
<message>
<source>&Information</source>
<translation>&Información</translation>
</message>
<message>
<source>Debug window</source>
<translation>Ventana Debug</translation>
</message>
<message>
<source>General</source>
<translation>General</translation>
</message>
<message>
<source>Startup time</source>
<translation>Tiempo de inicio</translation>
</message>
<message>
<source>Network</source>
<translation>Red</translation>
</message>
<message>
<source>Name</source>
<translation>Nombre</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Número de conexiones</translation>
</message>
<message>
<source>Block chain</source>
<translation>Bloquea cadena</translation>
</message>
<message>
<source>Version</source>
        <translation>Versión</translation>
</message>
<message>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<source>&Console</source>
<translation>&Consola</translation>
</message>
<message>
<source>Totals</source>
        <translation>Totales</translation>
</message>
<message>
<source>Clear console</source>
<translation>Limpiar Consola</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
        <translation>&Cantidad:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>&Message:</source>
        <translation>&Mensaje:</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>Código QR </translation>
</message>
<message>
<source>Copy &Address</source>
<translation>&Copia dirección</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>Guardar imagen...</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Fondos insuficientes</translation>
</message>
<message>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Fee:</source>
        <translation>Comisión:</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Comisión transacción:</translation>
</message>
<message>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<source>fast</source>
<translation>rapido</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Enviar a múltiples destinatarios</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>&Agrega destinatario</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Confirma el envio</translation>
</message>
<message>
<source>S&end</source>
<translation>&Envía</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
        <translation>C&antidad:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>&Pagar a:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Message:</source>
<translation>Mensaje:</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Pagar a:</translation>
</message>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>&Sign Message</source>
<translation>&Firmar Mensaje</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Escriba el mensaje que desea firmar</translation>
</message>
<message>
<source>Signature</source>
<translation>Firma</translation>
</message>
<message>
<source>Sign the message to prove you own this Chancoin address</source>
        <translation>Firmar un mensaje para probar que usted es dueño de esta dirección</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Firmar Mensaje</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<source>&Verify Message</source>
        <translation>&Verificar mensaje</translation>
</message>
<message>
<source>Verify &Message</source>
        <translation>Verificar &mensaje</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>[testnet]</source>
<translation>[red-de-pruebas]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Esta ventana muestra información detallada sobre la transacción</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
</context>
<context>
<name>TransactionView</name>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
        <translation>Opciones:</translation>
</message>
<message>
<source>Specify data directory</source>
        <translation>Especifica directorio para los datos</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
        <translation>Aceptar comandos de consola y JSON-RPC</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
        <translation>Correr como demonio y aceptar comandos</translation>
</message>
<message>
<source>Chancoin Core</source>
<translation>chancoin core</translation>
</message>
<message>
<source>Error loading block database</source>
        <translation>Error al cargar la base de datos de bloques</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Atención: Poco espacio en el disco duro</translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Enviar informacion de seguimiento a la consola en vez del archivo debug.log</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
        <translation>Usuario para las conexiones JSON-RPC</translation>
</message>
<message>
<source>Warning</source>
<translation>Atención</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
        <translation>Contraseña para las conexiones JSON-RPC</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
        <translation>Permite búsquedas DNS para -addnode, -seednode y -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Cargando direcciónes...</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Dirección -proxy invalida: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Fondos insuficientes</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Cargando el index de bloques...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
        <translation>Agrega un nodo al que conectarse e intenta mantener la conexión abierta</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Cargando cartera...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>No es posible desactualizar la billetera</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>No se pudo escribir la dirección por defecto</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Rescaneando...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Carga completa</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
</TS><|fim▁end|> | <name>OverviewPage</name> |
<|file_name|>genericcomicreader.py<|end_file_name|><|fim▁begin|><|fim▁hole|># This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import re
from weboob.capabilities.gallery import ICapGallery, BaseGallery, BaseImage
from weboob.tools.backend import BaseBackend
from weboob.tools.browser import BaseBrowser, BasePage
__all__ = ['GenericComicReaderBackend']
class DisplayPage(BasePage):
def get_page(self, gallery):
src = self.document.xpath(self.browser.params['img_src_xpath'])[0]
return BaseImage(src,
gallery=gallery,
url=src)
def page_list(self):
return self.document.xpath(self.browser.params['page_list_xpath'])
class GenericComicReaderBrowser(BaseBrowser):
def __init__(self, browser_params, *args, **kwargs):
self.params = browser_params
BaseBrowser.__init__(self, *args, **kwargs)
def iter_gallery_images(self, gallery):
self.location(gallery.url)
assert self.is_on_page(DisplayPage)
for p in self.page.page_list():
if 'page_to_location' in self.params:
self.location(self.params['page_to_location'] % p)
else:
self.location(p)
assert self.is_on_page(DisplayPage)
yield self.page.get_page(gallery)
def fill_image(self, image, fields):
if 'data' in fields:
image.data = self.readurl(image.url)
class GenericComicReaderBackend(BaseBackend, ICapGallery):
NAME = 'genericcomicreader'
MAINTAINER = u'Noé Rubinstein'
EMAIL = '[email protected]'
VERSION = '0.f'
DESCRIPTION = 'Generic comic reader backend; subclasses implement specific sites'
LICENSE = 'AGPLv3+'
BROWSER = GenericComicReaderBrowser
BROWSER_PARAMS = {}
ID_REGEXP = None
URL_REGEXP = None
ID_TO_URL = None
PAGES = {}
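    # Subclasses fill in the class attributes above; a hypothetical example
    # (site name, URLs and xpaths invented purely for illustration):
    #   ID_REGEXP = r'[^/]+/[^/]+'
    #   URL_REGEXP = r'http://reader\.example\.com/read/([^/]+/[^/]+)'
    #   ID_TO_URL = 'http://reader.example.com/read/%s'
    #   BROWSER_PARAMS = {'img_src_xpath': "//img[@id='page']/@src",
    #                     'page_list_xpath': "//select[@id='pages']/option/@value"}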
def create_default_browser(self):
b = self.create_browser(self.BROWSER_PARAMS)
b.PAGES = self.PAGES
try:
b.DOMAIN = self.DOMAIN
except AttributeError:
pass
return b
def iter_gallery_images(self, gallery):
with self.browser:
return self.browser.iter_gallery_images(gallery)
def get_gallery(self, _id):
match = re.match(r'^%s$' % self.URL_REGEXP, _id)
if match:
_id = match.group(1)
else:
match = re.match(r'^%s$' % self.ID_REGEXP, _id)
if match:
_id = match.group(0)
else:
return None
gallery = BaseGallery(_id, url=(self.ID_TO_URL % _id))
with self.browser:
return gallery
def fill_gallery(self, gallery, fields):
gallery.title = gallery.id
def fill_image(self, image, fields):
with self.browser:
self.browser.fill_image(image, fields)
OBJECTS = {
BaseGallery: fill_gallery,
BaseImage: fill_image}<|fim▁end|> | # -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Noé Rubinstein
# |
<|file_name|>ReliabilityTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* This class tests reliability of the framework in the face of failures of
* both tasks and tasktrackers. Steps:
* 1) Get the cluster status
* 2) Get the number of slots in the cluster
* 3) Spawn a sleepjob that occupies the entire cluster (with two waves of maps)
* 4) Get the list of running attempts for the job
* 5) Fail a few of them
* 6) Now fail a few trackers (ssh)
* 7) Job should run to completion
* 8) The above is repeated for the Sort suite of job (randomwriter, sort,
* validator). All jobs must complete, and finally, the sort validation
* should succeed.
* To run the test:
* ./bin/hadoop --config <config> jar
* build/hadoop-<version>-test.jar MRReliabilityTest -libjars
* build/hadoop-<version>-examples.jar [-scratchdir <dir>]"
*
* The scratchdir is optional and by default the current directory on the client
* will be used as the scratch space. Note that password-less SSH must be set up
* between the client machine from where the test is submitted, and the cluster
* nodes where the test runs.
*
* The test should be run on a <b>free</b> cluster where there is no other parallel
* job submission going on. Submission of other jobs while the test runs can cause
* the tests/jobs submitted to fail.
*/
public class ReliabilityTest extends Configured implements Tool {
private String dir;
private static final Log LOG = LogFactory.getLog(ReliabilityTest.class);
private void displayUsage() {
LOG.info("This must be run in only the distributed mode " +
"(LocalJobRunner not supported).\n\tUsage: MRReliabilityTest " +
"-libjars <path to hadoop-examples.jar> [-scratchdir <dir>]" +
"\n[-scratchdir] points to a scratch space on this host where temp" +
" files for this test will be created. Defaults to current working" +
" dir. \nPasswordless SSH must be set up between this host and the" +
" nodes which the test is going to use.\n"+
"The test should be run on a free cluster with no parallel job submission" +
" going on, as the test requires to restart TaskTrackers and kill tasks" +
" any job submission while the tests are running can cause jobs/tests to fail");
System.exit(-1);
}
public int run(String[] args) throws Exception {
Configuration conf = getConf();
if ("local".equals(conf.get("mapred.job.tracker", "local"))) {
displayUsage();
}
String[] otherArgs =
new GenericOptionsParser(conf, args).getRemainingArgs();
if (otherArgs.length == 2) {
if (otherArgs[0].equals("-scratchdir")) {
dir = otherArgs[1];
} else {
displayUsage();
}
}
else if (otherArgs.length == 0) {
dir = System.getProperty("user.dir");
} else {
displayUsage();
}
<|fim▁hole|> //fail, set some high values for the max attempts
conf.setInt("mapred.map.max.attempts", 10);
conf.setInt("mapred.reduce.max.attempts", 10);
runSleepJobTest(new JobClient(new JobConf(conf)), conf);
runSortJobTests(new JobClient(new JobConf(conf)), conf);
return 0;
}
private void runSleepJobTest(final JobClient jc, final Configuration conf)
throws Exception {
ClusterStatus c = jc.getClusterStatus();
int maxMaps = c.getMaxMapTasks() * 2;
int maxReduces = maxMaps;
int mapSleepTime = (int)c.getTTExpiryInterval();
int reduceSleepTime = mapSleepTime;
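    // Sleep times are tied to the TaskTracker expiry interval so tasks stay
    // alive long enough for the kill/stop threads below to exercise the
    // failure paths before the job finishes on its own.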
String[] sleepJobArgs = new String[] {
"-m", Integer.toString(maxMaps),
"-r", Integer.toString(maxReduces),
"-mt", Integer.toString(mapSleepTime),
"-rt", Integer.toString(reduceSleepTime)};
runTest(jc, conf, "org.apache.hadoop.examples.SleepJob", sleepJobArgs,
new KillTaskThread(jc, 2, 0.2f, false, 2),
new KillTrackerThread(jc, 2, 0.4f, false, 1));
LOG.info("SleepJob done");
}
private void runSortJobTests(final JobClient jc, final Configuration conf)
throws Exception {
String inputPath = "my_reliability_test_input";
String outputPath = "my_reliability_test_output";
FileSystem fs = jc.getFs();
fs.delete(new Path(inputPath), true);
fs.delete(new Path(outputPath), true);
runRandomWriterTest(jc, conf, inputPath);
runSortTest(jc, conf, inputPath, outputPath);
runSortValidatorTest(jc, conf, inputPath, outputPath);
}
private void runRandomWriterTest(final JobClient jc,
final Configuration conf, final String inputPath)
throws Exception {
runTest(jc, conf, "org.apache.hadoop.examples.RandomWriter",
new String[]{inputPath},
null, new KillTrackerThread(jc, 0, 0.4f, false, 1));
LOG.info("RandomWriter job done");
}
private void runSortTest(final JobClient jc, final Configuration conf,
final String inputPath, final String outputPath)
throws Exception {
runTest(jc, conf, "org.apache.hadoop.examples.Sort",
new String[]{inputPath, outputPath},
new KillTaskThread(jc, 2, 0.2f, false, 2),
new KillTrackerThread(jc, 2, 0.8f, false, 1));
LOG.info("Sort job done");
}
private void runSortValidatorTest(final JobClient jc,
final Configuration conf, final String inputPath, final String outputPath)
throws Exception {
runTest(jc, conf, "org.apache.hadoop.mapred.SortValidator", new String[] {
"-sortInput", inputPath, "-sortOutput", outputPath},
new KillTaskThread(jc, 2, 0.2f, false, 1),
new KillTrackerThread(jc, 2, 0.8f, false, 1));
LOG.info("SortValidator job done");
}
private String normalizeCommandPath(String command) {
final String hadoopHome;
if ((hadoopHome = System.getenv("HADOOP_HOME")) != null) {
command = hadoopHome + "/" + command;
}
return command;
}
private void checkJobExitStatus(int status, String jobName) {
if (status != 0) {
LOG.info(jobName + " job failed with status: " + status);
System.exit(status);
} else {
LOG.info(jobName + " done.");
}
}
//Starts the job in a thread. It also starts the taskKill/tasktrackerKill
//threads.
private void runTest(final JobClient jc, final Configuration conf,
final String jobClass, final String[] args, KillTaskThread killTaskThread,
KillTrackerThread killTrackerThread) throws Exception {
Thread t = new Thread("Job Test") {
public void run() {
try {
Class<?> jobClassObj = conf.getClassByName(jobClass);
int status = ToolRunner.run(conf, (Tool)(jobClassObj.newInstance()),
args);
checkJobExitStatus(status, jobClass);
} catch (Exception e) {
LOG.fatal("JOB " + jobClass + " failed to run");
System.exit(-1);
}
}
};
t.setDaemon(true);
t.start();
JobStatus[] jobs;
//get the job ID. This is the job that we just submitted
while ((jobs = jc.jobsToComplete()).length == 0) {
LOG.info("Waiting for the job " + jobClass +" to start");
Thread.sleep(1000);
}
JobID jobId = jobs[jobs.length - 1].getJobID();
RunningJob rJob = jc.getJob(jobId);
if(rJob.isComplete()) {
LOG.error("The last job returned by the querying JobTracker is complete :" +
rJob.getJobID() + " .Exiting the test");
System.exit(-1);
}
while (rJob.getJobState() == JobStatus.PREP) {
LOG.info("JobID : " + jobId + " not started RUNNING yet");
Thread.sleep(1000);
rJob = jc.getJob(jobId);
}
if (killTaskThread != null) {
killTaskThread.setRunningJob(rJob);
killTaskThread.start();
killTaskThread.join();
LOG.info("DONE WITH THE TASK KILL/FAIL TESTS");
}
if (killTrackerThread != null) {
killTrackerThread.setRunningJob(rJob);
killTrackerThread.start();
killTrackerThread.join();
LOG.info("DONE WITH THE TESTS TO DO WITH LOST TASKTRACKERS");
}
t.join();
}
private class KillTrackerThread extends Thread {
private volatile boolean killed = false;
private JobClient jc;
private RunningJob rJob;
final private int thresholdMultiplier;
private float threshold = 0.2f;
private boolean onlyMapsProgress;
private int numIterations;
final private String slavesFile = dir + "/_reliability_test_slaves_file_";
final String shellCommand = normalizeCommandPath("bin/slaves.sh");
final private String STOP_COMMAND = "ps uwwx | grep java | grep " +
"org.apache.hadoop.mapred.TaskTracker"+ " |" +
" grep -v grep | tr -s ' ' | cut -d ' ' -f2 | xargs kill -s STOP";
final private String RESUME_COMMAND = "ps uwwx | grep java | grep " +
"org.apache.hadoop.mapred.TaskTracker"+ " |" +
" grep -v grep | tr -s ' ' | cut -d ' ' -f2 | xargs kill -s CONT";
//Only one instance must be active at any point
    public KillTrackerThread(JobClient jc, int thresholdMultiplier,
        float threshold, boolean onlyMapsProgress, int numIterations) {
      this.jc = jc;
      this.thresholdMultiplier = thresholdMultiplier;
this.threshold = threshold;
this.onlyMapsProgress = onlyMapsProgress;
this.numIterations = numIterations;
setDaemon(true);
}
public void setRunningJob(RunningJob rJob) {
this.rJob = rJob;
}
public void kill() {
killed = true;
}
public void run() {
stopStartTrackers(true);
if (!onlyMapsProgress) {
stopStartTrackers(false);
}
}
private void stopStartTrackers(boolean considerMaps) {
if (considerMaps) {
LOG.info("Will STOP/RESUME tasktrackers based on Maps'" +
" progress");
} else {
LOG.info("Will STOP/RESUME tasktrackers based on " +
"Reduces' progress");
}
LOG.info("Initial progress threshold: " + threshold +
". Threshold Multiplier: " + thresholdMultiplier +
". Number of iterations: " + numIterations);
float thresholdVal = threshold;
int numIterationsDone = 0;
while (!killed) {
try {
float progress;
if (jc.getJob(rJob.getID()).isComplete() ||
numIterationsDone == numIterations) {
break;
}
if (considerMaps) {
progress = jc.getJob(rJob.getID()).mapProgress();
} else {
progress = jc.getJob(rJob.getID()).reduceProgress();
}
if (progress >= thresholdVal) {
numIterationsDone++;
ClusterStatus c;
stopTaskTrackers((c = jc.getClusterStatus(true)));
Thread.sleep((int)Math.ceil(1.5 * c.getTTExpiryInterval()));
startTaskTrackers();
thresholdVal = thresholdVal * thresholdMultiplier;
}
Thread.sleep(5000);
} catch (InterruptedException ie) {
killed = true;
return;
} catch (Exception e) {
LOG.fatal(StringUtils.stringifyException(e));
}
}
}
private void stopTaskTrackers(ClusterStatus c) throws Exception {
Collection <String> trackerNames = c.getActiveTrackerNames();
ArrayList<String> trackerNamesList = new ArrayList<String>(trackerNames);
Collections.shuffle(trackerNamesList);
int count = 0;
FileOutputStream fos = new FileOutputStream(new File(slavesFile));
LOG.info(new Date() + " Stopping a few trackers");
for (String tracker : trackerNamesList) {
String host = convertTrackerNameToHostName(tracker);
LOG.info(new Date() + " Marking tracker on host: " + host);
fos.write((host + "\n").getBytes());
if (count++ >= trackerNamesList.size()/2) {
break;
}
}
fos.close();
runOperationOnTT("suspend");
}
private void startTaskTrackers() throws Exception {
LOG.info(new Date() + " Resuming the stopped trackers");
runOperationOnTT("resume");
new File(slavesFile).delete();
}
private void runOperationOnTT(String operation) throws IOException {
Map<String,String> hMap = new HashMap<String,String>();
hMap.put("HADOOP_SLAVES", slavesFile);
StringTokenizer strToken;
if (operation.equals("suspend")) {
strToken = new StringTokenizer(STOP_COMMAND, " ");
} else {
strToken = new StringTokenizer(RESUME_COMMAND, " ");
}
String commandArgs[] = new String[strToken.countTokens() + 1];
int i = 0;
commandArgs[i++] = shellCommand;
while (strToken.hasMoreTokens()) {
commandArgs[i++] = strToken.nextToken();
}
String output = Shell.execCommand(hMap, commandArgs);
if (output != null && !output.equals("")) {
LOG.info(output);
}
}
private String convertTrackerNameToHostName(String trackerName) {
// Convert the trackerName to it's host name
int indexOfColon = trackerName.indexOf(":");
String trackerHostName = (indexOfColon == -1) ?
trackerName :
trackerName.substring(0, indexOfColon);
return trackerHostName.substring("tracker_".length());
}
}
private class KillTaskThread extends Thread {
private volatile boolean killed = false;
private RunningJob rJob;
private JobClient jc;
final private int thresholdMultiplier;
private float threshold = 0.2f;
private boolean onlyMapsProgress;
private int numIterations;
public KillTaskThread(JobClient jc, int thresholdMultiplier,
float threshold, boolean onlyMapsProgress, int numIterations) {
this.jc = jc;
this.thresholdMultiplier = thresholdMultiplier;
this.threshold = threshold;
this.onlyMapsProgress = onlyMapsProgress;
this.numIterations = numIterations;
setDaemon(true);
}
public void setRunningJob(RunningJob rJob) {
this.rJob = rJob;
}
public void kill() {
killed = true;
}
public void run() {
killBasedOnProgress(true);
if (!onlyMapsProgress) {
killBasedOnProgress(false);
}
}
private void killBasedOnProgress(boolean considerMaps) {
boolean fail = false;
if (considerMaps) {
LOG.info("Will kill tasks based on Maps' progress");
} else {
LOG.info("Will kill tasks based on Reduces' progress");
}
LOG.info("Initial progress threshold: " + threshold +
". Threshold Multiplier: " + thresholdMultiplier +
". Number of iterations: " + numIterations);
float thresholdVal = threshold;
int numIterationsDone = 0;
while (!killed) {
try {
float progress;
if (jc.getJob(rJob.getID()).isComplete() ||
numIterationsDone == numIterations) {
break;
}
if (considerMaps) {
progress = jc.getJob(rJob.getID()).mapProgress();
} else {
progress = jc.getJob(rJob.getID()).reduceProgress();
}
if (progress >= thresholdVal) {
numIterationsDone++;
if (numIterationsDone > 0 && numIterationsDone % 2 == 0) {
fail = true; //fail tasks instead of kill
}
ClusterStatus c = jc.getClusterStatus();
LOG.info(new Date() + " Killing a few tasks");
Collection<TaskAttemptID> runningTasks =
new ArrayList<TaskAttemptID>();
TaskReport mapReports[] = jc.getMapTaskReports(rJob.getID());
for (TaskReport mapReport : mapReports) {
if (mapReport.getCurrentStatus() == TIPStatus.RUNNING) {
runningTasks.addAll(mapReport.getRunningTaskAttempts());
}
}
if (runningTasks.size() > c.getTaskTrackers()/2) {
int count = 0;
for (TaskAttemptID t : runningTasks) {
LOG.info(new Date() + " Killed task : " + t);
rJob.killTask(t, fail);
if (count++ > runningTasks.size()/2) { //kill 50%
break;
}
}
}
runningTasks.clear();
TaskReport reduceReports[] = jc.getReduceTaskReports(rJob.getID());
for (TaskReport reduceReport : reduceReports) {
if (reduceReport.getCurrentStatus() == TIPStatus.RUNNING) {
runningTasks.addAll(reduceReport.getRunningTaskAttempts());
}
}
if (runningTasks.size() > c.getTaskTrackers()/2) {
int count = 0;
for (TaskAttemptID t : runningTasks) {
LOG.info(new Date() + " Killed task : " + t);
rJob.killTask(t, fail);
if (count++ > runningTasks.size()/2) { //kill 50%
break;
}
}
}
thresholdVal = thresholdVal * thresholdMultiplier;
}
Thread.sleep(5000);
} catch (InterruptedException ie) {
killed = true;
} catch (Exception e) {
LOG.fatal(StringUtils.stringifyException(e));
}
}
}
}
public static void main(String args[]) throws Exception {
int res = ToolRunner.run(new Configuration(), new ReliabilityTest(), args);
System.exit(res);
}
}<|fim▁end|> | //to protect against the case of jobs failing even when multiple attempts |
<|file_name|>paper-dialog.html.0.js<|end_file_name|><|fim▁begin|><|fim▁hole|> var dialog = fixture('basic');
assert.equal(dialog.getAttribute('role'), 'dialog', 'has role="dialog"');
});
});<|fim▁end|> | suite('a11y', function() {
test('dialog has role="dialog"', function() { |
<|file_name|>projectstatus_test.go<|end_file_name|><|fim▁begin|>package describe
import (
"strings"
"testing"
"time"
kapi "github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/errors"
ktestclient "github.com/GoogleCloudPlatform/kubernetes/pkg/client/testclient"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
"github.com/openshift/origin/pkg/client/testclient"
projectapi "github.com/openshift/origin/pkg/project/api"
)
func mustParseTime(t string) time.Time {
out, err := time.Parse(time.RFC3339, t)
if err != nil {
panic(err)
}
return out
}
func TestProjectStatus(t *testing.T) {
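	// Table-driven cases: Path loads fixture objects from disk, Extra injects
	// objects directly, ErrFn validates the returned error, Contains lists
	// substrings the describer output must include, and Time pins timeNowFn
	// so relative-age strings in the output are deterministic.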
testCases := map[string]struct {
Path string
Extra []runtime.Object
ErrFn func(error) bool
Contains []string
Time time.Time
}{
"missing project": {<|fim▁hole|> &projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
DisplayName: "Test",
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project Test (example)\n",
"You have no services, deployment configs, or build configs.",
},
},
"empty service": {
Path: "../../../../test/fixtures/app-scenarios/k8s-service-with-nothing.json",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service empty-service",
"(<initializing>:5432", "To see more information",
},
},
"unstarted build": {
Path: "../../../../test/fixtures/app-scenarios/new-project-no-build.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-example-2 (172.30.17.48:8080)",
"builds git://github.com",
"with docker.io/openshift/ruby-20-centos7:latest",
"not built yet",
"#1 deployment waiting on image or update",
"To see more information",
},
},
"running build": {
Path: "../../../../test/fixtures/app-scenarios/new-project-one-build.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-example-1 (172.30.17.47:8080)",
"builds git://github.com",
"with docker.io/openshift/ruby-20-centos7:latest",
"build 1 running for about a minute",
"#1 deployment waiting on image or update",
"To see more information",
},
Time: mustParseTime("2015-04-06T21:20:03Z"),
},
"a/b test deployment config": {
Path: "../../../../test/fixtures/app-scenarios/new-project-two-deployment-configs.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-app-example (172.30.17.49:8080)",
"sinatra-app-example-a deploys",
"sinatra-app-example-b deploys",
"with docker.io/openshift/ruby-20-centos7:latest",
"build 1 running for about a minute",
"- 7a4f354: Prepare v1beta3 Template types (Roy Programmer <[email protected]>)",
"To see more information",
},
Time: mustParseTime("2015-04-06T21:20:03Z"),
},
"with real deployments": {
Path: "../../../../test/fixtures/app-scenarios/new-project-deployed-app.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service database (172.30.17.240:5434 -> 3306)",
"service frontend (172.30.17.154:5432 -> 8080)",
"database deploys",
"frontend deploys",
"with example/ruby-20-centos7:latest",
"#1 deployed 8 seconds ago",
"#1 deployed less than a second ago",
"To see more information",
},
Time: mustParseTime("2015-04-07T04:12:25Z"),
},
}
oldTimeFn := timeNowFn
defer func() { timeNowFn = oldTimeFn }()
for k, test := range testCases {
timeNowFn = func() time.Time {
if !test.Time.IsZero() {
return test.Time
}
return time.Now()
}
o := ktestclient.NewObjects(kapi.Scheme)
if len(test.Path) > 0 {
if err := ktestclient.AddObjectsFromPath(test.Path, o); err != nil {
t.Fatal(err)
}
}
for _, obj := range test.Extra {
o.Add(obj)
}
osc, kc := testclient.NewFixtureClients(o)
d := ProjectStatusDescriber{C: osc, K: kc}
out, err := d.Describe("example", "")
if !test.ErrFn(err) {
t.Errorf("%s: unexpected error: %v", k, err)
}
if err != nil {
continue
}
for _, s := range test.Contains {
if !strings.Contains(out, s) {
t.Errorf("%s: did not have %q:\n%s\n---", k, s, out)
}
}
//t.Logf("\n%s", out)
}
}<|fim▁end|> | ErrFn: func(err error) bool { return errors.IsNotFound(err) },
},
"empty project with display name": {
Extra: []runtime.Object{ |
<|file_name|>infinitum_sq.ts<|end_file_name|><|fim▁begin|><TS language="sq" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Kliko me të djathtën për të ndryshuar adresën ose etiketen.</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Krijo një adresë të re</translation>
</message>
<message>
<source>&New</source>
<translation>&E re</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopjo adresën e zgjedhur në memorjen e sistemit </translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopjo</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Fshi adresen e selektuar nga lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporto të dhënat e skedës korrente në një skedar</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Fshi</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Enter passphrase</source>
<translation>Futni frazkalimin</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Frazkalim i ri</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Përsërisni frazkalimin e ri</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
</context>
<context>
<name>InfinitumGUI</name>
<message>
<source>Synchronizing with network...</source>
<translation>Duke u sinkronizuar me rrjetin...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Përmbledhje</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Trego një përmbledhje te përgjithshme të portofolit</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaksionet</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Shfleto historinë e transaksioneve</translation>
</message>
<message>
<source>Quit application</source>
<translation>Mbyllni aplikacionin</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opsione</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Duke marr adresen</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ndrysho frazkalimin e përdorur per enkriptimin e portofolit</translation>
</message>
<message>
<source>Infinitum</source>
<translation>Infinitum</translation>
</message>
<message>
<source>Wallet</source>
<translation>Portofol</translation>
</message>
<message>
<source>&Send</source>
<translation>&Dergo</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Merr</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Shfaq / Fsheh</translation>
</message>
<message>
<source>&File</source>
<translation>&Skedar</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Konfigurimet</translation>
</message>
<message>
<source>&Help</source>
<translation>&Ndihmë</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Shiriti i mjeteve</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 dhe %2</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 Pas</translation>
</message>
<message>
<source>Error</source>
<translation>Problem</translation>
</message>
<message>
<source>Information</source>
<translation>Informacion</translation>
</message>
<message>
<source>Up to date</source>
<translation>I azhornuar</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Duke u azhornuar...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Dërgo transaksionin</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transaksion në ardhje</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portofoli po <b> enkriptohet</b> dhe është <b> i ç'kyçur</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portofoli po <b> enkriptohet</b> dhe është <b> i kyçur</b></translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Zgjedhja e monedhes</translation>
</message>
<message>
<source>Amount:</source>
<translation>Shuma:</translation>
</message>
<message>
<source>Amount</source>
<translation>Sasia</translation>
</message>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Ndrysho Adresën</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etiketë</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adresa</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>name</source>
<translation>emri</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versioni</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Miresevini</translation>
</message>
<message>
<source>Error</source>
<translation>Problem</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opsionet</translation>
</message>
<message>
<source>W&allet</source>
<translation>Portofol</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formilarë</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Sasia</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>&Information</source>
<translation>Informacion</translation>
</message>
<message>
<source>&Open</source>
<translation>&Hap</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Pastro</translation>
</message>
<message>
<source>never</source>
<translation>asnjehere</translation>
</message>
<message>
<source>Unknown</source>
<translation>i/e panjohur</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>Shuma:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiketë:</translation>
</message>
<message>
<source>Clear</source>
<translation>Pastro</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Copy &Address</source>
<translation>&Kopjo adresen</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Dërgo Monedha</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Fonde te pamjaftueshme</translation>
</message>
<message>
<source>Amount:</source>
<translation>Shuma:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Dërgo marrësve të ndryshëm njëkohësisht</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balanca:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Konfirmo veprimin e dërgimit</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Sh&uma:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Paguaj &drejt:</translation>
</message><|fim▁hole|> <message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Ngjit nga memorja e sistemit</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Paguaj drejt:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Ngjit nga memorja e sistemit</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>[testnet]</source>
<translation>[testo rrjetin]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ky panel tregon një përshkrim të detajuar të transaksionit</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>infinitum-core</name>
<message>
<source>Options:</source>
<translation>Opsionet:</translation>
</message>
<message>
<source>Infinitum Core</source>
<translation>Berthama Infinitum</translation>
</message>
<message>
<source>Information</source>
<translation>Informacion</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Fonde te pamjaftueshme</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Rikerkim</translation>
</message>
<message>
<source>Error</source>
<translation>Problem</translation>
</message>
</context>
</TS><|fim▁end|> | <message>
<source>&Label:</source>
<translation>&Etiketë:</translation>
</message> |
<|file_name|>Channel.py<|end_file_name|><|fim▁begin|>from PIL import Image
class Channel:
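    """A named CMYK image channel of the given size, initialized to black."""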
def __init__(self, channelLabel, size):
<|fim▁hole|> self.channelLabel = channelLabel
self.channel = Image.new("CMYK", (size[0], size[1]), "black")
self.pixelMap = self.channel.load()
def save(self, filename):
self.channel.save(filename)<|fim▁end|> | |
<|file_name|>sr-latn.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'basicstyles', 'sr-latn', {
bold: 'Podebljano',
italic: 'Kurziv',
strike: 'Precrtano',
subscript: 'Indeks',
superscript: 'Stepen',
underline: 'Podvučeno'<|fim▁hole|><|fim▁end|> | } ); |
<|file_name|>history-assistant.js<|end_file_name|><|fim▁begin|>function HistoryAssistant() {
}
HistoryAssistant.prototype.setup = function() {
this.appMenuModel = {
visible: true,
items: [
{ label: $L("About"), command: 'about' },
{ label: $L("Help"), command: 'tutorial' },
]
};
this.controller.setupWidget(Mojo.Menu.appMenu, {omitDefaultItems: true}, this.appMenuModel);
var attributes = {};
this.model = {
//backgroundImage : 'images/glacier.png',
background: 'black',
onLeftFunction : this.wentLeft.bind(this),
onRightFunction : this.wentRight.bind(this)
}
this.controller.setupWidget('historydiv', attributes, this.model);
this.myPhotoDivElement = $('historydiv');
this.timestamp = new Date().getTime();
var env = Mojo.Environment.DeviceInfo;
if (env.screenHeight <= 400)
this.myPhotoDivElement.style.height = "372px";
}
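// Flick left: step one day back in time and refresh the left/center/right image URLs.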
HistoryAssistant.prototype.wentLeft = function(event){
this.timestamp = this.timestamp - (1000*60*60*24);
var timenow = new Date(this.timestamp);
var timenowstring = "";
var Ayear = timenow.getUTCFullYear();
var Amonth = timenow.getUTCMonth()+1;
if(Amonth.toString().length < 2)
Amonth = "0" + Amonth;
var Aday = timenow.getUTCDate();
if(Aday.toString().length < 2)
Aday = "0" + Aday;
/*var Ahours = timenow.getUTCHours();
if(Ahours.toString().length < 2)
Ahours = "0" + Ahours;*/
Ahours = "00";
if (this.timestamp > 1276146000000) {
if (this.timestamp > 1276146000000 + (1000 * 60 * 60 * 24)) {
this.myPhotoDivElement.mojo.leftUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday - 1) + Ahours + ".png");
}
this.myPhotoDivElement.mojo.centerUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday) + Ahours + ".png");<|fim▁hole|> this.myPhotoDivElement.mojo.rightUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday + 1) + Ahours + ".png");
}
$('text').innerHTML = "<center>Histoy Of Pixels - " + Ayear + " / " + Amonth + " / " + Aday + "</center>";
}
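// Flick right: step one day forward (never past today) and refresh the image URLs.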
HistoryAssistant.prototype.wentRight = function(event){
this.timestamp = this.timestamp + (1000*60*60*24);
var timenow = new Date(this.timestamp);
var timenowstring = "";
var Ayear = timenow.getUTCFullYear();
var Amonth = timenow.getUTCMonth()+1;
if(Amonth.toString().length < 2)
Amonth = "0" + Amonth;
var Aday = timenow.getUTCDate();
if(Aday.toString().length < 2)
Aday = "0" + Aday;
/*var Ahours = timenow.getUTCHours();
if(Ahours.toString().length < 2)
Ahours = "0" + Ahours;*/
Ahours = "00";
if (this.timestamp < new Date().getTime()) {
this.myPhotoDivElement.mojo.leftUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday - 1) + Ahours + ".png");
this.myPhotoDivElement.mojo.centerUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday) + Ahours + ".png");
if (this.timestamp + (1000*60*60*24) < new Date().getTime()) {
this.myPhotoDivElement.mojo.rightUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday + 1) + Ahours + ".png");
}
}
$('text').innerHTML = "<center>Histoy Of Pixels - " + Ayear + " / " + Amonth + " / " + Aday + "</center>";
}
HistoryAssistant.prototype.activate = function(event) {
var timenow = new Date(this.timestamp);
var timenowstring = "";
var Ayear = timenow.getUTCFullYear();
var Amonth = timenow.getUTCMonth()+1;
if(Amonth.toString().length < 2)
Amonth = "0" + Amonth;
var Aday = timenow.getUTCDate();
if(Aday.toString().length < 2)
Aday = "0" + Aday;
/*var Ahours = timenow.getUTCHours();
if(Ahours.toString().length < 2)
Ahours = "0" + Ahours;*/
Ahours = "00";
this.myPhotoDivElement.mojo.leftUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday-1) + Ahours + ".png");
this.myPhotoDivElement.mojo.centerUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday) + Ahours + ".png");
//this.myPhotoDivElement.mojo.rightUrlProvided("http://100ps.omoco.de/100pixels/history/" + Ayear + Amonth + (Aday+1) + Ahours + ".png");
$('text').innerHTML = "<center>Histoy Of Pixels - " + Ayear + " / " + Amonth + " / " + Aday + "</center>";
}
HistoryAssistant.prototype.deactivate = function(event) {
}
HistoryAssistant.prototype.cleanup = function(event) {
}
HistoryAssistant.prototype.handleCommand = function(event){
if(event.type == Mojo.Event.command) {
switch (event.command) {
case 'about':
Mojo.Controller.stageController.pushScene("about");
break;
case 'tutorial':
this.controller.showAlertDialog({
onChoose: function(value) {},
title:"Help",
message:"This is the history of all pixels user have every created. Every night there is made a new snapshot.<br><br>Flip the image left or right to go through the history. Zoom into the image for more details.",
allowHTMLMessage: true,
choices:[ {label:'OK', value:'OK', type:'color'} ]
});
break;
}
}
}<|fim▁end|> | |
<|file_name|>factory.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of HEPData.
# Copyright (C) 2016 CERN.
#
# HEPData is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# HEPData is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HEPData; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""HEPData application factories."""
import os
import sys
from invenio_base.app import create_app_factory
from invenio_base.wsgi import create_wsgi_factory
from invenio_config import create_config_loader
<|fim▁hole|>
env_prefix = 'APP'
conf_loader = create_config_loader(config=config, env_prefix=env_prefix)
instance_path = os.getenv(env_prefix + '_INSTANCE_PATH') or \
os.path.join(sys.prefix, 'var', 'hepdata-instance')
static_folder = os.getenv(env_prefix + '_STATIC_FOLDER') or \
os.path.join(instance_path, 'static')
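# REST API application factory; the UI factory below mounts it under /api.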
create_api = create_app_factory(
'hepdata',
config_loader=conf_loader,
extension_entry_points=['invenio_base.api_apps'],
blueprint_entry_points=['invenio_base.api_blueprints'],
instance_path=instance_path,
)
create_app = create_app_factory(
'hepdata',
config_loader=conf_loader,
extension_entry_points=['invenio_base.apps'],
blueprint_entry_points=['invenio_base.blueprints'],
wsgi_factory=create_wsgi_factory({'/api': create_api}),
instance_path=instance_path,
static_folder=static_folder,
)<|fim▁end|> | from . import config |
<|file_name|>ContactsMenuDO.java<|end_file_name|><|fim▁begin|>package com.wodejia.myapp.data.contacts;
import java.io.Serializable;
/**
* Created by clarence on 16/9/2.
*/
public class ContactsMenuDO implements Serializable {
private int key;
private String value;
private String title;
public int getKey() {
return key;
}
public void setKey(int key) {
this.key = key;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getTitle() {
return title;
}<|fim▁hole|>
public void setTitle(String title) {
this.title = title;
}
}<|fim▁end|> | |
<|file_name|>qp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Fri Nov 15 15:55:28 2013
@author: dyanna
"""
import numpy as np
from sklearn.svm import SVC
def getSample(pointA, pointB, numberOfPoints):
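    # Draw uniform points in [-1,1]^2, labeled by the line through pointA and
    # pointB; re-sample until both classes (+1 and -1) are present.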
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
sample = np.array([(i[0], i[1], isLeft(pointA, pointB, i)) for i in pointList])
y = sample[:,2]
breakpoint = False
while not breakpoint:
if(len(y[y==-1]) == 0 or len(y[y==1]) == 0):
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
sample = np.array([(i[0], i[1], isLeft(pointA, pointB, i)) for i in pointList])
y = sample[:,2]
else:
breakpoint = True
return sample
def getRandomLine():
return list(zip(np.random.uniform(-1,1.00,2),np.random.uniform(-1,1.00,2)))
def getPoints(numberOfPoints):
pointList = list(zip(np.random.uniform(-1,1.00,numberOfPoints),np.random.uniform(-1,1.00,numberOfPoints)))
return pointList
def isLeft(a, b, c):
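    # Sign of the 2D cross product (b-a) x (c-a): +1 if c lies to the left of
    # the line a->b, otherwise -1.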
return 1 if ((b[0] - a[0])*(c[1] - a[1]) - (b[1] - a[1])*(c[0] - a[0])) > 0 else -1;
def sign(x):
return 1 if x > 0 else -1
def getMisMatchesQP(data, clf):
#print(data)
data_x = np.c_[data[:,0], data[:,1]]
results = clf.predict(data_x)
#print(np.sign(results))
print("mismatch ", float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data))
print("score ", clf.score(data_x, data[:,2]))
return float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data)
def doMonteCarloQP(pointa, pointb, clf, nopoint):
#print "weights ", weight
points = [(np.random.uniform(-1,1), np.random.uniform(-1,1)) for i in range(nopoint)]
#print points
dataset_Monte = np.array([(i[0],i[1], isLeft(pointa,pointb,i)) for i in points])
#print dataset_Monte
return getMisMatchesQP(dataset_Monte, clf)
def doPLA(sample):
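    # Perceptron Learning Algorithm: repeatedly pick a random misclassified
    # point and add it to the weight vector until no mismatches remain.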
w = np.array([0,0,0])
iteration = 0
it = 0
while True:#(it < 10):
iteration = iteration + 1
it = it + 1
mismatch = list()
for i in sample:
#print("point in question ", i , " weight ", w)
yy = w[0] + w[1] * i[0] + w[2] * i[1]
#print("this is after applying weight to a point ",yy)
point = [i[0], i[1], sign(yy)]
if any(np.equal(sample, point).all(1)):
#print "point not in sample"
if(point[2] == -1):
mismatch.append((1, (i[0]), (i[1])))
else:
mismatch.append((-1, -(i[0]), -(i[1])))
#print " length ", len(mismatch), " mismatch list ",mismatch
if(len(mismatch) > 0):
#find a random point and update w
choiceIndex = np.random.randint(0, len(mismatch))
choice = mismatch[choiceIndex]
#print("choice ", choice)
w = w + choice
#print "new weight ", w
else:
break
#print("this is the iteration ", iteration)
#print("this is the weight ", w)
#montelist = [monetcarlo((x1,y1),(x2,y2),w,10000) for i in range(5)]
#print("Montelist " , montelist)
#monteavg = sum([i for i in montelist])/10
return w, iteration
def getMisMatches(data, weights):
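    # Fraction of points whose sign under the (negated) linear score
    # w0 + w1*x + w2*y disagrees with the stored label.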
#print data
list1 = np.empty(len(data))
list1.fill(weights[0])
results = list1+ weights[1]*data[:,0]+weights[2]*data[:,1]
results = -1 * results
return float(len(data) - np.sum(np.sign(results) == np.sign(data[:,2])))/len(data)
def doMonteCarloNP(pointa, pointb, weights, nopoint):
#print "weights ", weight
points = [(np.random.uniform(-1,1), np.random.uniform(-1,1)) for i in range(nopoint)]
#print points
dataset_Monte = np.array([(i[0],i[1], isLeft(pointa,pointb,i)) for i in points])
#print dataset_Monte
return getMisMatches(dataset_Monte, weights)
if __name__ == "__main__":
'''X = np.array([[-1,-1],[-2,-1], [1,1], [2,1]])
y = np.array([1,1,2,2])
clf = SVC()
clf.fit(X,y)
print(clf.predict([[-0.8,-1]]))'''
#clf = SVC()
clf = SVC(C = 1000, kernel = 'linear')
monteavgavgQP = list()
monteavgavgPLA = list()<|fim▁hole|> vectornumberavg = list()
predictavg = list()
for j in range(1):
#clf = SVC(C = 1000, kernel = 'linear')
monteavgQP = list()
monteavgPLA = list()
approxQP = list()
vectoravg = list()
for k in range(1000):
nopoints = 100
line = getRandomLine()
sample = getSample(line[0], line[1], nopoints)
#print(sample)
X = np.c_[sample[:,0], sample[:,1]]
y = sample[:,2]
#print(y)
clf.fit(X,y)
#print(clf.score(X,y))
w, it = doPLA(sample)
#print(len(clf.support_vectors_))
#print(clf.support_vectors_)
#print(clf.support_)
vectoravg.append(len(clf.support_vectors_))
#print(clf.predict(clf.support_vectors_)==1)
#print(clf.predict(clf.support_vectors_))
#print(clf.coef_)
montelistQP = [doMonteCarloQP(line[0], line[1], clf, 500) for i in range(1)]
qpMonte = sum(montelistQP)/len(montelistQP)
monteavgQP.append(sum(montelistQP)/len(montelistQP))
montelist = [ doMonteCarloNP(line[0], line[1], w, 500) for i in range(1)]
plaMonte = sum(montelist)/len(montelist)
monteavgPLA.append(plaMonte)
            if(qpMonte < plaMonte):
approxQP.append(1)
else:
approxQP.append(0)
#print(sum(monteavgQP)/len(monteavgQP))
#print(sum(monteavgPLA)/len(monteavgPLA))
#print(sum(approxQP)/len(approxQP))
monteavgavgQP.append(sum(monteavgQP)/len(monteavgQP))
monteavgavgPLA.append(sum(monteavgPLA)/len(monteavgPLA))
approxavgQP.append(sum(approxQP)/len(approxQP))
vectornumberavg.append(sum(vectoravg)/len(vectoravg))
print(sum(monteavgavgQP)/len(monteavgavgQP))
print(sum(monteavgavgPLA)/len(monteavgavgPLA))
print("how good is it? ", sum(approxavgQP)/len(approxavgQP))
print("how good is it? ", sum(vectornumberavg)/len(vectornumberavg))<|fim▁end|> | approxavgQP = list() |
<|file_name|>redis_kv.py<|end_file_name|><|fim▁begin|># (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: redis_kv
author: Jan-Piet Mens <jpmens(at)gmail.com>
version_added: "0.9"
short_description: fetch data from Redis
description:
            - This lookup returns the values stored in Redis for the given keys; each term names the Redis server URL and the key to fetch.
requirements:
- redis (python library https://github.com/andymccurdy/redis-py/)
options:
_terms:
description: Two element comma separated strings composed of url of the Redis server and key to query
options:
_url:
description: location of redis host in url format
default: 'redis://localhost:6379'
_key:
description: key to query
required: True
"""
EXAMPLES = """
- name: query redis for somekey
debug: msg="{{ lookup('redis_kv', 'redis://localhost:6379,somekey') }} is value in Redis for somekey"
"""
RETURN = """
_raw:
description: values stored in Redis
"""
import os
import re
HAVE_REDIS = False
try:
import redis
HAVE_REDIS = True
except ImportError:
pass
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
# ==============================================================
# REDISGET: Obtain value from a GET on a Redis key. Terms
# expected: 0 = URL, 1 = Key
# URL may be empty, in which case redis://localhost:6379 assumed
# --------------------------------------------------------------
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not HAVE_REDIS:
raise AnsibleError("Can't LOOKUP(redis_kv): module redis is not installed")
ret = []
for term in terms:
(url, key) = term.split(',')
if url == "":
url = 'redis://localhost:6379'
# urlsplit on Python 2.6.1 is broken. Hmm. Probably also the reason
# Redis' from_url() doesn't work here.
p = '(?P<scheme>[^:]+)://?(?P<host>[^:/ ]+).?(?P<port>[0-9]*).*'
try:
m = re.search(p, url)
host = m.group('host')
port = int(m.group('port'))
except AttributeError:
raise AnsibleError("Bad URI in redis lookup")
try:
conn = redis.Redis(host=host, port=port)<|fim▁hole|> res = ""
ret.append(res)
except:
ret.append("") # connection failed or key not found
return ret<|fim▁end|> | res = conn.get(key)
if res is None: |
<|file_name|>alias1.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Dmitry Vyukov. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
// run
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Test that dynamic interface checks treat byte=uint8
// and rune=int or rune=int32.
package main
func main() {
var x interface{}
x = byte(1)
switch x.(type) {
case uint8:
// ok
default:
panic("byte != uint8")
}
x = uint8(2)
switch x.(type) {
case byte:
// ok
default:
panic("uint8 != byte")
}
rune32 := false
x = rune(3)
switch x.(type) {<|fim▁hole|> case int32:
// must be new code
rune32 = true
default:
panic("rune != int and rune != int32")
}
if rune32 {
x = int32(4)
} else {
x = int(5)
}
switch x.(type) {
case rune:
// ok
default:
panic("int (or int32) != rune")
}
}<|fim▁end|> | case int:
// ok |
<|file_name|>wrapper_test_71.cpp<|end_file_name|><|fim▁begin|>#include "Halide.h"
#include <tiramisu/utils.h>
#include <cstdlib>
#include <iostream>
#include "wrapper_test_71.h"
#ifdef __cplusplus
extern "C" {
#endif
#ifdef __cplusplus
} // extern "C"
#endif
// We assume that the increment is 1.
void reference_saxpy(int N1, float alpha, float *A, float *B)
{
for (int i=0; i<N1; i++)
B[i] = alpha*A[i] + B[i];
}
int main(int, char **)
{
Halide::Buffer<float> a(1, "a");
Halide::Buffer<float> x(SIZE, "x");
Halide::Buffer<float> y_ref(SIZE, "y_ref");<|fim▁hole|> init_buffer(y_ref, (float)1);
init_buffer(a, (float)1);
reference_saxpy(SIZE, 1, x.data(), y_ref.data());
tiramisu_generated_code(a.raw_buffer(), x.raw_buffer(), y.raw_buffer());
compare_buffers("test_" + std::string(TEST_NUMBER_STR) + "_" + std::string(TEST_NAME_STR), y, y_ref);
return 0;
}<|fim▁end|> | Halide::Buffer<float> y(SIZE, "y");
init_buffer(x, (float)1);
init_buffer(y, (float)1); |
<|file_name|>moreLikeThis.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.<|fim▁hole|>// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package core
import (
"encoding/json"
"fmt"
"github.com/splicers/elastigo/api"
)
// MoreLikeThis allows the caller to get documents that are “like” a specified document.
// http://www.elasticsearch.org/guide/reference/api/more-like-this.html
func MoreLikeThis(index string, _type string, id string, args map[string]interface{}, query MoreLikeThisQuery) (api.BaseResponse, error) {
var url string
var retval api.BaseResponse
url = fmt.Sprintf("/%s/%s/%s/_mlt", index, _type, id)
body, err := api.DoCommand("GET", url, args, query)
if err != nil {
return retval, err
}
	// unmarshal the response body into the typed result
	jsonErr := json.Unmarshal(body, &retval)
	if jsonErr != nil {
		return retval, jsonErr
	}
	return retval, nil
}
type MoreLikeThisQuery struct {
MoreLikeThis MLT `json:"more_like_this"`
}
type MLT struct {
Fields []string `json:"fields"`
LikeText string `json:"like_text"`
PercentTermsToMatch float32 `json:"percent_terms_to_match"`
MinTermFrequency int `json:"min_term_freq"`
MaxQueryTerms int `json:"max_query_terms"`
StopWords []string `json:"stop_words"`
MinDocFrequency int `json:"min_doc_freq"`
MaxDocFrequency int `json:"max_doc_freq"`
MinWordLength int `json:"min_word_len"`
MaxWordLength int `json:"max_word_len"`
BoostTerms int `json:"boost_terms"`
Boost float32 `json:"boost"`
Analyzer string `json:"analyzer"`
}<|fim▁end|> | |
<|file_name|>LSTM2.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.contrib import rnn
import time
from datetime import timedelta
# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
# Training Parameters
learning_rate = 0.005
training_steps = 15000
batch_size = 128
display_step = 200
<|fim▁hole|>num_input = 28 # MNIST data input (img shape: 28*28)
timesteps = 28 # timesteps
num_hidden = 128 # hidden layer num of features
num_classes = 10 # MNIST total classes (0-9 digits)
# tf Graph input
X = tf.placeholder("float", [None, timesteps, num_input])
Y = tf.placeholder("float", [None, num_classes])
# Define weights
weights = {
'out': tf.Variable(tf.random_normal([num_hidden, num_classes]))
}
biases = {
'out': tf.Variable(tf.random_normal([num_classes]))
}
def RNN(x, weights, biases):
# Prepare data shape to match `rnn` function requirements
# Current data input shape: (batch_size, timesteps, n_input)
# Required shape: 'timesteps' tensors list of shape (batch_size, n_input)
# Unstack to get a list of 'timesteps' tensors of shape (batch_size,
# n_input)
x = tf.unstack(x, timesteps, 1)
# Define a lstm cell with tensorflow
lstm_cell = rnn.BasicLSTMCell(num_hidden, forget_bias=1.0)
# Get lstm cell output
outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)
# Linear activation, using rnn inner loop last output
return tf.matmul(outputs[-1], weights['out']) + biases['out']
logits = RNN(X, weights, biases)
prediction = tf.nn.softmax(logits)
# Define loss and optimizer
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
logits=logits, labels=Y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
# Evaluate model (with test logits, for dropout to be disabled)
correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# Initialize the variables (i.e. assign their default value)
init = tf.global_variables_initializer()
loss_group = []
epoch_group = []
# Start training
with tf.Session() as sess:
# Run the initializer
sess.run(init)
start_time = time.time()
for step in range(1, training_steps + 1):
tf.set_random_seed(23)
batch_x, batch_y = mnist.train.next_batch(batch_size)
# Reshape data to get 28 seq of 28 elements
batch_x = batch_x.reshape((batch_size, timesteps, num_input))
# Run optimization op (backprop)
sess.run(train_op, feed_dict={X: batch_x, Y: batch_y})
if step % display_step == 0 or step == 1:
# Calculate batch loss and accuracy
loss, acc = sess.run([loss_op, accuracy], feed_dict={X: batch_x,
Y: batch_y})
loss_group.append(loss)
epoch_group.append(step)
print("Step " + str(step) + ", Minibatch Loss= " +
"{:.4f}".format(loss) + ", Training Accuracy= " +
"{:.3f}".format(acc))
print("Optimization Finished!")
print(loss_group)
print(epoch_group)
plt.plot(epoch_group, loss_group)
plt.show()
end_time = time.time()
time_dif = end_time - start_time
print("Time usage: " + str(timedelta(seconds=int(round(time_dif)))))
# Calculate accuracy for 128 mnist test images
test_len = 128
test_data = mnist.test.images[:test_len].reshape(
(-1, timesteps, num_input))
test_label = mnist.test.labels[:test_len]
print("Testing Accuracy:",
sess.run(accuracy, feed_dict={X: test_data, Y: test_label}))<|fim▁end|> | # Network Parameters |
<|file_name|>callbacks.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import print_function
import theano
import theano.tensor as T
import numpy as np
import time, json, warnings
from collections import deque
from .utils.generic_utils import Progbar
class CallbackList(object):
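    """Fans callback events out to a list of callbacks, warning when a
    callback is slow relative to the batch update itself."""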
def __init__(self, callbacks=[], queue_length=10):
self.callbacks = [c for c in callbacks]
self.queue_length = queue_length
def append(self, callback):
self.callbacks.append(callback)
def _set_params(self, params):
for callback in self.callbacks:
callback._set_params(params)
def _set_model(self, model):
for callback in self.callbacks:
callback._set_model(model)
def on_epoch_begin(self, epoch, logs={}):
for callback in self.callbacks:
callback.on_epoch_begin(epoch, logs)
self._delta_t_batch = 0.
self._delta_ts_batch_begin = deque([], maxlen=self.queue_length)
self._delta_ts_batch_end = deque([], maxlen=self.queue_length)
def on_epoch_end(self, epoch, logs={}):
for callback in self.callbacks:
callback.on_epoch_end(epoch, logs)
def on_batch_begin(self, batch, logs={}):
t_before_callbacks = time.time()
for callback in self.callbacks:
callback.on_batch_begin(batch, logs)
self._delta_ts_batch_begin.append(time.time() - t_before_callbacks)
delta_t_median = np.median(self._delta_ts_batch_begin)
if self._delta_t_batch > 0. and delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1:
warnings.warn('Method on_batch_begin() is slow compared '
'to the batch update (%f). Check your callbacks.' % delta_t_median)
self._t_enter_batch = time.time()
def on_batch_end(self, batch, logs={}):
self._delta_t_batch = time.time() - self._t_enter_batch
t_before_callbacks = time.time()
for callback in self.callbacks:
callback.on_batch_end(batch, logs)
self._delta_ts_batch_end.append(time.time() - t_before_callbacks)
delta_t_median = np.median(self._delta_ts_batch_end)
if self._delta_t_batch > 0. and delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1:
warnings.warn('Method on_batch_end() is slow compared '
'to the batch update (%f). Check your callbacks.' % delta_t_median)
def on_train_begin(self, logs={}):
for callback in self.callbacks:
callback.on_train_begin(logs)
def on_train_end(self, logs={}):
for callback in self.callbacks:
callback.on_train_end(logs)
class Callback(object):
def __init__(self):
pass
def _set_params(self, params):
self.params = params
def _set_model(self, model):
self.model = model
def on_epoch_begin(self, epoch, logs={}):
pass
def on_epoch_end(self, epoch, logs={}):
pass
def on_batch_begin(self, batch, logs={}):
pass
def on_batch_end(self, batch, logs={}):
pass
def on_train_begin(self, logs={}):
pass
def on_train_end(self, logs={}):
pass
class BaseLogger(Callback):
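    """Prints a per-epoch progress bar with running averages of the metrics."""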
def on_train_begin(self, logs={}):
self.verbose = self.params['verbose']
def on_epoch_begin(self, epoch, logs={}):
if self.verbose:
print('Epoch %d' % epoch)
self.progbar = Progbar(target=self.params['nb_sample'],
verbose=self.verbose)
self.seen = 0
self.totals = {}
def on_batch_begin(self, batch, logs={}):
if self.seen < self.params['nb_sample']:
self.log_values = []
def on_batch_end(self, batch, logs={}):
batch_size = logs.get('size', 0)
self.seen += batch_size
for k, v in logs.items():
if k in self.totals:
self.totals[k] += v * batch_size
else:
self.totals[k] = v * batch_size
for k in self.params['metrics']:
if k in logs:
self.log_values.append((k, logs[k]))
# skip progbar update for the last batch; will be handled by on_epoch_end
if self.verbose and self.seen < self.params['nb_sample']:
self.progbar.update(self.seen, self.log_values)
def on_epoch_end(self, epoch, logs={}):
for k in self.params['metrics']:
if k in self.totals:
self.log_values.append((k, self.totals[k] / self.seen))
if k in logs:
self.log_values.append((k, logs[k]))
if self.verbose:
self.progbar.update(self.seen, self.log_values)
class History(Callback):
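    """Records per-epoch averages of the tracked metrics into a history dict."""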
def on_train_begin(self, logs={}):
self.epoch = []
self.history = {}
def on_epoch_begin(self, epoch, logs={}):
self.seen = 0
self.totals = {}
def on_batch_end(self, batch, logs={}):
batch_size = logs.get('size', 0)
self.seen += batch_size
for k, v in logs.items():
if k in self.totals:
self.totals[k] += v * batch_size
else:
self.totals[k] = v * batch_size
def on_epoch_end(self, epoch, logs={}):
self.epoch.append(epoch)
for k, v in self.totals.items():
if k not in self.history:
self.history[k] = []
self.history[k].append(v / self.seen)
for k, v in logs.items():
if k not in self.history:
self.history[k] = []
self.history[k].append(v)<|fim▁hole|>
class ModelCheckpoint(Callback):
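    """Saves the model weights after every epoch, optionally only when the
    monitored quantity improves."""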
def __init__(self, filepath, monitor='val_loss', verbose=0, save_best_only=False):
super(Callback, self).__init__()
self.monitor = monitor
self.verbose = verbose
self.filepath = filepath
self.save_best_only = save_best_only
self.best = np.Inf
def on_epoch_end(self, epoch, logs={}):
if self.save_best_only:
current = logs.get(self.monitor)
if current is None:
warnings.warn("Can save best model only with %s available, skipping." % (self.monitor), RuntimeWarning)
else:
if current < self.best:
if self.verbose > 0:
print("Epoch %05d: %s improved from %0.5f to %0.5f, saving model to %s"
% (epoch, self.monitor, self.best, current, self.filepath))
self.best = current
self.model.save_weights(self.filepath, overwrite=True)
else:
if self.verbose > 0:
print("Epoch %05d: %s did not improve" % (epoch, self.monitor))
else:
if self.verbose > 0:
print("Epoch %05d: saving model to %s" % (epoch, self.filepath))
self.model.save_weights(self.filepath, overwrite=True)
class EarlyStopping(Callback):
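    """Stops training once the monitored quantity has not improved for
    patience consecutive epochs."""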
def __init__(self, monitor='val_loss', patience=0, verbose=0):
super(Callback, self).__init__()
self.monitor = monitor
self.patience = patience
self.verbose = verbose
self.best = np.Inf
self.wait = 0
def on_epoch_end(self, epoch, logs={}):
current = logs.get(self.monitor)
if current is None:
warnings.warn("Early stopping requires %s available!" % (self.monitor), RuntimeWarning)
if current < self.best:
self.best = current
self.wait = 0
else:
if self.wait >= self.patience:
if self.verbose > 0:
print("Epoch %05d: early stopping" % (epoch))
self.model.stop_training = True
self.wait += 1
class RemoteMonitor(Callback):
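    """Posts per-epoch metric averages as JSON to a remote server at epoch end."""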
def __init__(self, root='http://localhost:9000'):
self.root = root
def on_epoch_begin(self, epoch, logs={}):
self.seen = 0
self.totals = {}
def on_batch_end(self, batch, logs={}):
batch_size = logs.get('size', 0)
self.seen += batch_size
for k, v in logs.items():
if k in self.totals:
self.totals[k] += v * batch_size
else:
self.totals[k] = v * batch_size
def on_epoch_end(self, epoch, logs={}):
import requests
send = {}
send['epoch'] = epoch
for k, v in self.totals.items():
send[k] = v / self.seen
        for k, v in logs.items():
send[k] = v
r = requests.post(self.root + '/publish/epoch/end/', {'data': json.dumps(send)})<|fim▁end|> | |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>from simpletcp.clientsocket import ClientSocket
s1 = ClientSocket("localhost", 5000)
response = s1.send("Hello, World!")
s2 = ClientSocket("localhost", 5000, single_use=False)
r1 = s2.send("Hello for the first time...")
r2 = s2.send("...and hello for the last!")
s2.close()
# Display the correspondence
print("s1 sent\t\tHello, World!")<|fim▁hole|>print("s2 received\t\t{}".format(r1.decode("UTF-8")))
print("s2 sent\t\t...and hello for the last!.")
print("s2 received\t\t{}".format(r2.decode("UTF-8")))<|fim▁end|> | print("s1 received\t\t{}".format(response.decode("UTF-8")))
print("-------------------------------------------------")
print("s2 sent\t\tHello for the first time....") |
<|file_name|>age.js<|end_file_name|><|fim▁begin|>import config from '../components/configLoader';
import { addToDefaultPluginDOM } from '../components/helpers';<|fim▁hole|>
const pluginConfig = config.plugins.find(obj => obj.name === 'age');
// DOM setup
const pluginId = 'js-plugin-age';
addToDefaultPluginDOM(pluginId);
const ageDOM = document.getElementById(pluginId);
const renderAge = () => {
const { birthday, goal } = pluginConfig;
// Inspired by:
// Alex MacCaw https://github.com/maccman/motivation
const now = new Date();
const age = (now - new Date(birthday)) / 3.1556952e+10; // divided by 1 year in ms
let remainder = 100 - (age / goal * 100);
let goalPrefix = 'left until';
if (remainder < 0) {
goalPrefix = 'over goal of';
remainder = -remainder;
}
ageDOM.innerHTML = `Age: ${age.toFixed(5)}, ${remainder.toFixed(2)}% ${goalPrefix} ${goal}`;
};
// Initialize plugin
export const init = () => renderAge(); // eslint-disable-line import/prefer-default-export<|fim▁end|> | |
<|file_name|>constants.rs<|end_file_name|><|fim▁begin|>pub const ENV_RPC_ADDR: &str = "RPC_ADDR";
pub const ENV_RPC_PASSWORD: &str = "RPC_PASSWORD";
pub const STATE_FILE_NAME: &str = "client_state.xml";<|fim▁hole|>pub const ALL_PROJECTS_LIST_FILENAME: &str = "all_projects_list.xml";<|fim▁end|> | |
<|file_name|>helpers.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package helper
import (
"crypto/md5"
"encoding/json"
"fmt"
"strings"
"time"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/conversion"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/selection"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/client-go/pkg/api"
)
// NonConvertibleFields iterates over the provided map and filters out all but
// any keys with the "non-convertible.kubernetes.io" prefix.
func NonConvertibleFields(annotations map[string]string) map[string]string {
nonConvertibleKeys := map[string]string{}
for key, value := range annotations {
if strings.HasPrefix(key, api.NonConvertibleAnnotationPrefix) {
nonConvertibleKeys[key] = value
}
}
return nonConvertibleKeys
}
// Semantic can do semantic deep equality checks for api objects.
// Example: apiequality.Semantic.DeepEqual(aPod, aPodWithNonNilButEmptyMaps) == true
var Semantic = conversion.EqualitiesOrDie(
func(a, b resource.Quantity) bool {
// Ignore formatting, only care that numeric value stayed the same.
// TODO: if we decide it's important, it should be safe to start comparing the format.
//
// Uninitialized quantities are equivalent to 0 quantities.
return a.Cmp(b) == 0
},
func(a, b metav1.Time) bool {
return a.UTC() == b.UTC()
},
func(a, b labels.Selector) bool {
return a.String() == b.String()
},
func(a, b fields.Selector) bool {
return a.String() == b.String()
},
)
var standardResourceQuotaScopes = sets.NewString(
string(api.ResourceQuotaScopeTerminating),
string(api.ResourceQuotaScopeNotTerminating),
string(api.ResourceQuotaScopeBestEffort),
string(api.ResourceQuotaScopeNotBestEffort),
)
// IsStandardResourceQuotaScope returns true if the scope is a standard value
func IsStandardResourceQuotaScope(str string) bool {
return standardResourceQuotaScopes.Has(str)
}
var podObjectCountQuotaResources = sets.NewString(
string(api.ResourcePods),
)
var podComputeQuotaResources = sets.NewString(
string(api.ResourceCPU),
string(api.ResourceMemory),
string(api.ResourceLimitsCPU),
string(api.ResourceLimitsMemory),
string(api.ResourceRequestsCPU),
string(api.ResourceRequestsMemory),
)
// IsResourceQuotaScopeValidForResource returns true if the resource applies to the specified scope
func IsResourceQuotaScopeValidForResource(scope api.ResourceQuotaScope, resource string) bool {
switch scope {
case api.ResourceQuotaScopeTerminating, api.ResourceQuotaScopeNotTerminating, api.ResourceQuotaScopeNotBestEffort:
return podObjectCountQuotaResources.Has(resource) || podComputeQuotaResources.Has(resource)
case api.ResourceQuotaScopeBestEffort:
return podObjectCountQuotaResources.Has(resource)
default:
return true
}
}
var standardContainerResources = sets.NewString(
string(api.ResourceCPU),
string(api.ResourceMemory),
)
// IsStandardContainerResourceName returns true if the container can make a resource request
// for the specified resource
func IsStandardContainerResourceName(str string) bool {
return standardContainerResources.Has(str)
}
// IsOpaqueIntResourceName returns true if the resource name has the opaque
// integer resource prefix.
func IsOpaqueIntResourceName(name api.ResourceName) bool {
return strings.HasPrefix(string(name), api.ResourceOpaqueIntPrefix)
}
// OpaqueIntResourceName returns a ResourceName with the canonical opaque
// integer prefix prepended. If the argument already has the prefix, it is
// returned unmodified.
func OpaqueIntResourceName(name string) api.ResourceName {
if IsOpaqueIntResourceName(api.ResourceName(name)) {
return api.ResourceName(name)
}
return api.ResourceName(fmt.Sprintf("%s%s", api.ResourceOpaqueIntPrefix, name))
}
var standardLimitRangeTypes = sets.NewString(
string(api.LimitTypePod),
string(api.LimitTypeContainer),
string(api.LimitTypePersistentVolumeClaim),
)
// IsStandardLimitRangeType returns true if the type is Pod or Container
func IsStandardLimitRangeType(str string) bool {
return standardLimitRangeTypes.Has(str)
}
var standardQuotaResources = sets.NewString(
string(api.ResourceCPU),
string(api.ResourceMemory),
string(api.ResourceRequestsCPU),
string(api.ResourceRequestsMemory),
string(api.ResourceRequestsStorage),
string(api.ResourceLimitsCPU),
string(api.ResourceLimitsMemory),
string(api.ResourcePods),
string(api.ResourceQuotas),
string(api.ResourceServices),
string(api.ResourceReplicationControllers),
string(api.ResourceSecrets),
string(api.ResourcePersistentVolumeClaims),
string(api.ResourceConfigMaps),
string(api.ResourceServicesNodePorts),
string(api.ResourceServicesLoadBalancers),
)
// IsStandardQuotaResourceName returns true if the resource is known to
// the quota tracking system
func IsStandardQuotaResourceName(str string) bool {
return standardQuotaResources.Has(str)
}
var standardResources = sets.NewString(
string(api.ResourceCPU),
string(api.ResourceMemory),
string(api.ResourceRequestsCPU),
string(api.ResourceRequestsMemory),
string(api.ResourceLimitsCPU),
string(api.ResourceLimitsMemory),
string(api.ResourcePods),
string(api.ResourceQuotas),
string(api.ResourceServices),
string(api.ResourceReplicationControllers),
string(api.ResourceSecrets),
string(api.ResourceConfigMaps),
string(api.ResourcePersistentVolumeClaims),
string(api.ResourceStorage),
string(api.ResourceRequestsStorage),
)
// IsStandardResourceName returns true if the resource is known to the system
func IsStandardResourceName(str string) bool {
return standardResources.Has(str)
}
var integerResources = sets.NewString(
string(api.ResourcePods),
string(api.ResourceQuotas),
string(api.ResourceServices),
string(api.ResourceReplicationControllers),
string(api.ResourceSecrets),
string(api.ResourceConfigMaps),
string(api.ResourcePersistentVolumeClaims),
string(api.ResourceServicesNodePorts),
string(api.ResourceServicesLoadBalancers),
)
// IsIntegerResourceName returns true if the resource is measured in integer values
func IsIntegerResourceName(str string) bool {
return integerResources.Has(str) || IsOpaqueIntResourceName(api.ResourceName(str))
}
// this function aims to check if the service's ClusterIP is set or not
// the objective is not to perform validation here
func IsServiceIPSet(service *api.Service) bool {
return service.Spec.ClusterIP != api.ClusterIPNone && service.Spec.ClusterIP != ""
}
// this function aims to check if the service's cluster IP is requested or not
func IsServiceIPRequested(service *api.Service) bool {
// ExternalName services are CNAME aliases to external ones. Ignore the IP.
if service.Spec.Type == api.ServiceTypeExternalName {
return false
}
return service.Spec.ClusterIP == ""
}
var standardFinalizers = sets.NewString(
string(api.FinalizerKubernetes),
metav1.FinalizerOrphanDependents,
)
// HasAnnotation returns a bool if passed in annotation exists
func HasAnnotation(obj api.ObjectMeta, ann string) bool {
_, found := obj.Annotations[ann]
return found
}
// SetMetaDataAnnotation sets the annotation and value
func SetMetaDataAnnotation(obj *api.ObjectMeta, ann string, value string) {
if obj.Annotations == nil {
obj.Annotations = make(map[string]string)
}
obj.Annotations[ann] = value
}
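// IsStandardFinalizerName returns true if the finalizer name is known to the system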
func IsStandardFinalizerName(str string) bool {
return standardFinalizers.Has(str)
}
// AddToNodeAddresses appends the NodeAddresses to the passed-by-pointer slice,
// only if they do not already exist
func AddToNodeAddresses(addresses *[]api.NodeAddress, addAddresses ...api.NodeAddress) {
for _, add := range addAddresses {
exists := false
for _, existing := range *addresses {
if existing.Address == add.Address && existing.Type == add.Type {
exists = true
break
}
}
if !exists {
*addresses = append(*addresses, add)
}
}
}
func HashObject(obj runtime.Object, codec runtime.Codec) (string, error) {
data, err := runtime.Encode(codec, obj)
if err != nil {
return "", err
}
return fmt.Sprintf("%x", md5.Sum(data)), nil
}
// TODO: make method on LoadBalancerStatus?
func LoadBalancerStatusEqual(l, r *api.LoadBalancerStatus) bool {
return ingressSliceEqual(l.Ingress, r.Ingress)
}
func ingressSliceEqual(lhs, rhs []api.LoadBalancerIngress) bool {
if len(lhs) != len(rhs) {
return false
}
for i := range lhs {
if !ingressEqual(&lhs[i], &rhs[i]) {
return false
}
}
return true
}
func ingressEqual(lhs, rhs *api.LoadBalancerIngress) bool {
if lhs.IP != rhs.IP {
return false
}
if lhs.Hostname != rhs.Hostname {
return false
}
return true
}
// TODO: make method on LoadBalancerStatus?
func LoadBalancerStatusDeepCopy(lb *api.LoadBalancerStatus) *api.LoadBalancerStatus {
c := &api.LoadBalancerStatus{}
c.Ingress = make([]api.LoadBalancerIngress, len(lb.Ingress))
for i := range lb.Ingress {
c.Ingress[i] = lb.Ingress[i]
}
return c
}
// GetAccessModesAsString returns a string representation of an array of access modes.
// modes, when present, are always in the same order: RWO,ROX,RWX.
func GetAccessModesAsString(modes []api.PersistentVolumeAccessMode) string {
modes = removeDuplicateAccessModes(modes)
modesStr := []string{}
if containsAccessMode(modes, api.ReadWriteOnce) {
modesStr = append(modesStr, "RWO")
}
if containsAccessMode(modes, api.ReadOnlyMany) {
modesStr = append(modesStr, "ROX")
}
if containsAccessMode(modes, api.ReadWriteMany) {
modesStr = append(modesStr, "RWX")
}
return strings.Join(modesStr, ",")
}
// GetAccessModesFromString returns an array of AccessModes from a string created by GetAccessModesAsString
func GetAccessModesFromString(modes string) []api.PersistentVolumeAccessMode {
strmodes := strings.Split(modes, ",")
accessModes := []api.PersistentVolumeAccessMode{}
for _, s := range strmodes {
s = strings.Trim(s, " ")
switch {
case s == "RWO":
accessModes = append(accessModes, api.ReadWriteOnce)
case s == "ROX":
accessModes = append(accessModes, api.ReadOnlyMany)
case s == "RWX":
accessModes = append(accessModes, api.ReadWriteMany)
}
}
return accessModes
}
// removeDuplicateAccessModes returns an array of access modes without any duplicates
func removeDuplicateAccessModes(modes []api.PersistentVolumeAccessMode) []api.PersistentVolumeAccessMode {
accessModes := []api.PersistentVolumeAccessMode{}
for _, m := range modes {<|fim▁hole|> }
return accessModes
}
func containsAccessMode(modes []api.PersistentVolumeAccessMode, mode api.PersistentVolumeAccessMode) bool {
for _, m := range modes {
if m == mode {
return true
}
}
return false
}
// ParseRFC3339 parses an RFC3339 date in either RFC3339Nano or RFC3339 format.
func ParseRFC3339(s string, nowFn func() metav1.Time) (metav1.Time, error) {
if t, timeErr := time.Parse(time.RFC3339Nano, s); timeErr == nil {
return metav1.Time{Time: t}, nil
}
t, err := time.Parse(time.RFC3339, s)
if err != nil {
return metav1.Time{}, err
}
return metav1.Time{Time: t}, nil
}
// NodeSelectorRequirementsAsSelector converts the []NodeSelectorRequirement api type into a struct that implements
// labels.Selector.
func NodeSelectorRequirementsAsSelector(nsm []api.NodeSelectorRequirement) (labels.Selector, error) {
if len(nsm) == 0 {
return labels.Nothing(), nil
}
selector := labels.NewSelector()
for _, expr := range nsm {
var op selection.Operator
switch expr.Operator {
case api.NodeSelectorOpIn:
op = selection.In
case api.NodeSelectorOpNotIn:
op = selection.NotIn
case api.NodeSelectorOpExists:
op = selection.Exists
case api.NodeSelectorOpDoesNotExist:
op = selection.DoesNotExist
case api.NodeSelectorOpGt:
op = selection.GreaterThan
case api.NodeSelectorOpLt:
op = selection.LessThan
default:
return nil, fmt.Errorf("%q is not a valid node selector operator", expr.Operator)
}
r, err := labels.NewRequirement(expr.Key, op, expr.Values)
if err != nil {
return nil, err
}
selector = selector.Add(*r)
}
return selector, nil
}
// GetTolerationsFromPodAnnotations gets the json serialized tolerations data from Pod.Annotations
// and converts it to the []Toleration type in api.
func GetTolerationsFromPodAnnotations(annotations map[string]string) ([]api.Toleration, error) {
var tolerations []api.Toleration
if len(annotations) > 0 && annotations[api.TolerationsAnnotationKey] != "" {
err := json.Unmarshal([]byte(annotations[api.TolerationsAnnotationKey]), &tolerations)
if err != nil {
return tolerations, err
}
}
return tolerations, nil
}
// AddOrUpdateTolerationInPod tries to add a toleration to the pod's toleration list.
// Returns true if something was updated, false otherwise.
func AddOrUpdateTolerationInPod(pod *api.Pod, toleration *api.Toleration) bool {
podTolerations := pod.Spec.Tolerations
var newTolerations []api.Toleration
updated := false
for i := range podTolerations {
if toleration.MatchToleration(&podTolerations[i]) {
if Semantic.DeepEqual(toleration, podTolerations[i]) {
return false
}
newTolerations = append(newTolerations, *toleration)
updated = true
continue
}
newTolerations = append(newTolerations, podTolerations[i])
}
if !updated {
newTolerations = append(newTolerations, *toleration)
}
pod.Spec.Tolerations = newTolerations
return true
}
// TolerationToleratesTaint checks if the toleration tolerates the taint.
func TolerationToleratesTaint(toleration *api.Toleration, taint *api.Taint) bool {
if len(toleration.Effect) != 0 && toleration.Effect != taint.Effect {
return false
}
if toleration.Key != taint.Key {
return false
}
// TODO: Use proper defaulting when Toleration becomes a field of PodSpec
if (len(toleration.Operator) == 0 || toleration.Operator == api.TolerationOpEqual) && toleration.Value == taint.Value {
return true
}
if toleration.Operator == api.TolerationOpExists {
return true
}
return false
}
// TaintToleratedByTolerations checks if taint is tolerated by any of the tolerations.
func TaintToleratedByTolerations(taint *api.Taint, tolerations []api.Toleration) bool {
tolerated := false
for i := range tolerations {
if TolerationToleratesTaint(&tolerations[i], taint) {
tolerated = true
break
}
}
return tolerated
}
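// Illustrative sketch of the matching rules above (values are made up): an
// Exists toleration matches regardless of the taint's value.
//
//	taint := api.Taint{Key: "dedicated", Value: "gpu", Effect: api.TaintEffectNoSchedule}
//	tols := []api.Toleration{{Key: "dedicated", Operator: api.TolerationOpExists}}
//	TaintToleratedByTolerations(&taint, tols) // true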
// GetTaintsFromNodeAnnotations gets the json serialized taints data from Node.Annotations
// and converts it to the []Taint type in api.
func GetTaintsFromNodeAnnotations(annotations map[string]string) ([]api.Taint, error) {
var taints []api.Taint
if len(annotations) > 0 && annotations[api.TaintsAnnotationKey] != "" {
err := json.Unmarshal([]byte(annotations[api.TaintsAnnotationKey]), &taints)
if err != nil {
return []api.Taint{}, err
}
}
return taints, nil
}
// SysctlsFromPodAnnotations parses the sysctl annotations into a slice of safe Sysctls
// and a slice of unsafe Sysctls. This is only a convenience wrapper around
// SysctlsFromPodAnnotation.
func SysctlsFromPodAnnotations(a map[string]string) ([]api.Sysctl, []api.Sysctl, error) {
safe, err := SysctlsFromPodAnnotation(a[api.SysctlsPodAnnotationKey])
if err != nil {
return nil, nil, err
}
unsafe, err := SysctlsFromPodAnnotation(a[api.UnsafeSysctlsPodAnnotationKey])
if err != nil {
return nil, nil, err
}
return safe, unsafe, nil
}
// SysctlsFromPodAnnotation parses an annotation value into a slice of Sysctls.
func SysctlsFromPodAnnotation(annotation string) ([]api.Sysctl, error) {
if len(annotation) == 0 {
return nil, nil
}
kvs := strings.Split(annotation, ",")
sysctls := make([]api.Sysctl, len(kvs))
for i, kv := range kvs {
cs := strings.Split(kv, "=")
if len(cs) != 2 || len(cs[0]) == 0 {
return nil, fmt.Errorf("sysctl %q not of the format sysctl_name=value", kv)
}
sysctls[i].Name = cs[0]
sysctls[i].Value = cs[1]
}
return sysctls, nil
}
// PodAnnotationsFromSysctls creates an annotation value for a slice of Sysctls.
func PodAnnotationsFromSysctls(sysctls []api.Sysctl) string {
if len(sysctls) == 0 {
return ""
}
kvs := make([]string, len(sysctls))
for i := range sysctls {
kvs[i] = fmt.Sprintf("%s=%s", sysctls[i].Name, sysctls[i].Value)
}
return strings.Join(kvs, ",")
}
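// Illustrative round trip between the two sysctl helpers above:
//
//	s := PodAnnotationsFromSysctls([]api.Sysctl{{Name: "kernel.shm_rmid_forced", Value: "1"}})
//	// s == "kernel.shm_rmid_forced=1"
//	back, err := SysctlsFromPodAnnotation(s) // back[0] == {kernel.shm_rmid_forced 1}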
// GetAffinityFromPodAnnotations gets the json serialized affinity data from Pod.Annotations
// and converts it to the Affinity type in api.
// TODO: remove when alpha support for affinity is removed
func GetAffinityFromPodAnnotations(annotations map[string]string) (*api.Affinity, error) {
if len(annotations) > 0 && annotations[api.AffinityAnnotationKey] != "" {
var affinity api.Affinity
err := json.Unmarshal([]byte(annotations[api.AffinityAnnotationKey]), &affinity)
if err != nil {
return nil, err
}
return &affinity, nil
}
return nil, nil
}
// GetPersistentVolumeClass returns StorageClassName.
func GetPersistentVolumeClass(volume *api.PersistentVolume) string {
// Use beta annotation first
if class, found := volume.Annotations[api.BetaStorageClassAnnotation]; found {
return class
}
return volume.Spec.StorageClassName
}
// GetPersistentVolumeClaimClass returns StorageClassName. If no storage class was
// requested, it returns "".
func GetPersistentVolumeClaimClass(claim *api.PersistentVolumeClaim) string {
// Use beta annotation first
if class, found := claim.Annotations[api.BetaStorageClassAnnotation]; found {
return class
}
if claim.Spec.StorageClassName != nil {
return *claim.Spec.StorageClassName
}
return ""
}
// PersistentVolumeClaimHasClass returns true if given claim has set StorageClassName field.
func PersistentVolumeClaimHasClass(claim *api.PersistentVolumeClaim) bool {
// Use beta annotation first
if _, found := claim.Annotations[api.BetaStorageClassAnnotation]; found {
return true
}
if claim.Spec.StorageClassName != nil {
return true
}
return false
}<|fim▁end|> | if !containsAccessMode(accessModes, m) {
accessModes = append(accessModes, m)
} |
<|file_name|>vtkRuledSurfaceFilter.py<|end_file_name|><|fim▁begin|># class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk
class vtkRuledSurfaceFilter(SimpleVTKClassModuleBase):
def __init__(self, module_manager):
SimpleVTKClassModuleBase.__init__(
self, module_manager,
vtk.vtkRuledSurfaceFilter(), 'Processing.',<|fim▁hole|> inputFunctions=None, outputFunctions=None)<|fim▁end|> | ('vtkPolyData',), ('vtkPolyData',),
replaceDoc=True, |
<|file_name|>jquery.bootstrap.wizard.js<|end_file_name|><|fim▁begin|>/*!
* jQuery twitter bootstrap wizard plugin
* Examples and documentation at: http://github.com/VinceG/twitter-bootstrap-wizard
* version 1.0
* Requires jQuery v1.3.2 or later
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
* Authors: Vadim Vincent Gabriel (http://vadimg.com)
*/
;
(function ($) {
var bootstrapWizardCreate = function (element, options) {
var element = $(element);
var obj = this;
// Merge options with defaults
//var $settings = $.extend($.fn.bootstrapWizard.defaults, options || {});
var $settings = $.extend({}, $.fn.bootstrapWizard.defaults, options);
var $activeTab = null;
var $navigation = null;
this.fixNavigationButtons = function () {
// Get the current active tab
if (!$activeTab.length) {
// Select first one
$navigation.find('a:first').tab('show');
$activeTab = $navigation.find('li:first');
}
// If we're currently on the first tab, disable the previous button (and likewise disable next on the last tab)
if (obj.firstIndex() >= obj.currentIndex()) {
$('li.previous', element).addClass('disabled');
} else {
$('li.previous', element).removeClass('disabled');
}
if (obj.currentIndex() >= obj.navigationLength()) {
$('li.next', element).addClass('disabled');
} else {
$('li.next', element).removeClass('disabled');
}
if ($settings.onTabShow && typeof $settings.onTabShow === 'function' && $settings.onTabShow($activeTab, $navigation, obj.currentIndex()) === false) {
return false;
}
};
this.next = function (e) {
// If we clicked the last then don't activate this
if (element.hasClass('last')) {
return false;
}
if ($settings.onNext && typeof $settings.onNext === 'function' && $settings.onNext($activeTab, $navigation, obj.nextIndex()) === false) {
return false;
}
// Did we click the last button
$index = obj.nextIndex();
if ($index > obj.navigationLength()) {
} else {
$navigation.find('li:eq(' + $index + ') a').tab('show');
}
};
this.previous = function (e) {
// If we clicked the first then don't activate this
if (element.hasClass('first')) {
return false;
}
if ($settings.onPrevious && typeof $settings.onPrevious === 'function' && $settings.onPrevious($activeTab, $navigation, obj.previousIndex()) === false) {
return false;
}
$index = obj.previousIndex();
if ($index < 0) {
} else {
$navigation.find('li:eq(' + $index + ') a').tab('show');
}
};
this.first = function (e) {
if ($settings.onFirst && typeof $settings.onFirst === 'function' && $settings.onFirst($activeTab, $navigation, obj.firstIndex()) === false) {
return false;
}
// If the element is disabled then we won't do anything
if (element.hasClass('disabled')) {
return false;
}
$navigation.find('li:eq(0) a').tab('show');
};
this.last = function (e) {
if ($settings.onLast && typeof $settings.onLast === 'function' && $settings.onLast($activeTab, $navigation, obj.lastIndex()) === false) {
return false;
}
// If the element is disabled then we won't do anything
if (element.hasClass('disabled')) {
return false;
}
$navigation.find('li:eq(' + obj.navigationLength() + ') a').tab('show');
};
this.currentIndex = function () {
return $navigation.find('li').index($activeTab);
};
this.firstIndex = function () {
return 0;
};
this.lastIndex = function () {
return obj.navigationLength();
};
this.getIndex = function (elem) {
return $navigation.find('li').index(elem);
};
this.nextIndex = function () {
return $navigation.find('li').index($activeTab) + 1;<|fim▁hole|> this.previousIndex = function () {
return $navigation.find('li').index($activeTab) - 1;
};
this.navigationLength = function () {
return $navigation.find('li').length - 1;
};
this.activeTab = function () {
return $activeTab;
};
this.nextTab = function () {
return $navigation.find('li:eq(' + (obj.currentIndex() + 1) + ')').length ? $navigation.find('li:eq(' + (obj.currentIndex() + 1) + ')') : null;
};
this.previousTab = function () {
if (obj.currentIndex() <= 0) {
return null;
}
return $navigation.find('li:eq(' + parseInt(obj.currentIndex() - 1) + ')');
};
$navigation = element.find('ul:first', element);
$activeTab = $navigation.find('li.active', element);
if (!$navigation.hasClass($settings.class)) {
$navigation.addClass($settings.class);
}
// Load onShow
if ($settings.onInit && typeof $settings.onInit === 'function') {
$settings.onInit($activeTab, $navigation, 0);
}
// Next/Previous events
$($settings.nextSelector, element).bind('click', obj.next);
$($settings.previousSelector, element).bind('click', obj.previous);
$($settings.lastSelector, element).bind('click', obj.last);
$($settings.firstSelector, element).bind('click', obj.first);
// Load onShow
if ($settings.onShow && typeof $settings.onShow === 'function') {
$settings.onShow($activeTab, $navigation, obj.nextIndex());
}
// Work the next/previous buttons
obj.fixNavigationButtons();
$('a[data-toggle="tab"]', element).on('click', function (e) {
if ($settings.onTabClick && typeof $settings.onTabClick === 'function' && $settings.onTabClick($activeTab, $navigation, obj.currentIndex()) === false) {
return false;
}
});
$('a[data-toggle="tab"]', element).on('show', function (e) {
$element = $(e.target).parent();
// If it's disabled then do not change
if ($element.hasClass('disabled')) {
return false;
}
$activeTab = $element; // activated tab
obj.fixNavigationButtons();
});
};
$.fn.bootstrapWizard = function (options) {
return this.each(function (index) {
var element = $(this);
// Return early if this element already has a plugin instance
if (element.data('bootstrapWizard')) return;
// pass options to plugin constructor
var wizard = new bootstrapWizardCreate(element, options);
// Store plugin object in this element's data
element.data('bootstrapWizard', wizard);
});
};
// expose options
$.fn.bootstrapWizard.defaults = {
'class':'nav nav-pills',
'nextSelector':'.wizard li.next',
'previousSelector':'.wizard li.previous',
'firstSelector':'.wizard li.first',
'lastSelector':'.wizard li.last',
'onShow':null,
'onInit':null,
'onNext':null,
'onPrevious':null,
'onLast':null,
'onFirst':null,
'onTabClick':null,
'onTabShow':null
};
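// Illustrative usage sketch; the element id and handler body are assumptions.
// The plugin expects a container holding a tabbed <ul> plus pager items
// matching the selectors above:
//   $('#my-wizard').bootstrapWizard({
//     onNext: function(tab, navigation, index) { /* e.g. validate the current step */ }
//   });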
})(jQuery);<|fim▁end|> | }; |
<|file_name|>validator.js<|end_file_name|><|fim▁begin|>var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
import * as Rules from './validation_rules';
import { find, partial, uniqBy } from 'lodash';
/**
* Creates a validator instance
*
* @param {array} rules
* @param {object} redux
* @param {object} validations
*
* @example
* let valid = new validator([{
* label: 'email ',
* rules: [{
* rule: 'required',
* message: 'Email address required'
* }]
* }]);
*
* @example with redux binding
* let valid = new validator([{
* label: 'email ',
* rules: [{
* rule: 'required',
* message: 'Email address required'
* }, {
* rule: action1,
* message: 'Some message'
* }]
* }], {
* store: store,
* actions: {
* action1: () => { ... },
* action2: () => { ... }
* }
* });
*
* @example with custom validations
* let valid = new validator([{
* label: 'email ',
* rules: [{
* rule: 'required',
* message: 'Email address required'
* }, {
* label: 'specialString',
* rule: [{
* rule: 'containsSomeWord',
* args: 'foo',
* message: 'This value must have the word "foo"'
* }]
* }]
* }], null, {
* containsSomeWord: (val, word) => val.indexOf(word) > -1
* });
*/
var validator = function () {
function validator(rules, redux, validations, value) {
_classCallCheck(this, validator);
this._rules = rules;
this._val = value ? value : '';
this._redux = redux;
this._validations = validations || {};
this._reduxRules = [];
this._errors = [];
}
validator.prototype.checkRules = function checkRules() {
var _this = this;
var optional = false;
this._errors = this._rules.filter(function (r) {
if (r.rule === 'optional' && _this._val === '') optional = true;
return _this.isRuleValid(r.rule);
});
// only run redux async rules if all synchronous rules have passed
// currently not logging synchronous errors for redux callbacks, use store state
this._reduxRules = uniqBy(this._reduxRules, 'rule');
if (this._reduxRules.length > 0 && this._errors.length < 1) {
this._reduxRules.forEach(function (r) {
return r.promise = _this._redux.store.dispatch(_this._redux.actions[r.rule](_this._val));
});
}
// check for optional override
if (optional) this._errors = [];
};
validator.prototype.hasArgs = function hasArgs(rule) {
return find(this._rules, function (r) {
return r.rule === rule && r.args;
});
};
validator.prototype.isRuleValid = function isRuleValid(rule) {
if (rule && (this._validations[rule] || Rules[rule])) {
var ruleToEval = Rules[rule] || this._validations[rule];
var hasArgs = this.hasArgs(rule);
var method = partial(ruleToEval, this._val);<|fim▁hole|> return hasArgs ? !method(hasArgs.args) : !method();
} else if (rule && this._redux.actions[rule]) {
this._reduxRules.push({ rule: rule, promise: Promise.resolve() });
}
};
validator.prototype.reset = function reset() {
this._errors = [];
};
_createClass(validator, [{
key: 'val',
set: function set(val) {
this._val = val;
},
get: function get() {
return this._val;
}
}, {
key: 'errors',
get: function get() {
return this._errors;
}
}]);
return validator;
}();
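// Illustrative sketch of running the rules (the rule set and value are
// assumptions; see the constructor examples in the JSDoc above):
//   const v = new validator(rules, null, null, 'foo@bar.com');
//   v.checkRules();
//   if (v.errors.length > 0) { /* each entry keeps its configured message */ }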
export { validator as default };<|fim▁end|> | |
<|file_name|>loghandler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# DummyMP - Multiprocessing Library for Dummies!
# Copyright 2014 Albert Huang.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# DummyMP Library - Logging Redirect Handler
# multiprocessing library for dummies!
# (library for easily running functions in parallel)
#
import logging
import config
import os
class DummyMPLogHandler(logging.Handler):
"""DummyMP logging handler to allow multiprocess logging.
This class is a custom logging handler to allow spawned processes
(from :py:mod:`multiprocessing`) to log without any issues. This
works by intercepting emitted log records, and sending them via
queue to the master process. The master process will process each
record and call :py:meth:`logging.Logger.handle` to emit the
logging record at the master process level.
Note that this class can be used as a general multiprocess logging
handler simply by removing the int_pid attribute.
Attributes:
queue (:py:class:`multiprocessing.Queue`): The Queue object to
forward logging records to.
int_pid (int): The internal PID used to reference the process.
"""
def __init__(self, int_pid, queue):
"""Initializes DummyMPLogHandler with the inputted internal PID
and Queue object."""
logging.Handler.__init__(self)
self.queue = queue
self.int_pid = int_pid
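    # Illustrative sketch (assumed wiring): inside a spawned process, attach
    # this handler so records travel back to the master over the shared queue.
    #
    #   logger = logging.getLogger()
    #   logger.addHandler(DummyMPLogHandler(int_pid, queue))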
def emit(self, record):
"""Method override to forward logging records to the internal<|fim▁hole|> except:
# Something went wrong...
self.handleError(record)<|fim▁end|> | Queue object."""
try:
# Format: [ [queueMsgID, PID, internal PID], record ]
self.queue.put([[config.DUMMYMP_LOG_ID, os.getpid(), self.int_pid], record]) |
<|file_name|>list_multipart_uploads_request_response.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2019, Oracle and/or its affiliates. All rights reserved.
// Code generated. DO NOT EDIT.
package objectstorage
import (
"github.com/oracle/oci-go-sdk/common"
"net/http"
)
// ListMultipartUploadsRequest wrapper for the ListMultipartUploads operation
type ListMultipartUploadsRequest struct {
// The Object Storage namespace used for the request.
NamespaceName *string `mandatory:"true" contributesTo:"path" name:"namespaceName"`
// The name of the bucket. Avoid entering confidential information.
// Example: `my-new-bucket1`
BucketName *string `mandatory:"true" contributesTo:"path" name:"bucketName"`
// The maximum number of items to return.
Limit *int `mandatory:"false" contributesTo:"query" name:"limit"`
// The page at which to start retrieving results.
Page *string `mandatory:"false" contributesTo:"query" name:"page"`
// The client request ID for tracing.
OpcClientRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-client-request-id"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata
}
func (request ListMultipartUploadsRequest) String() string {
return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface
func (request ListMultipartUploadsRequest) HTTPRequest(method, path string) (http.Request, error) {
return common.MakeDefaultHTTPRequestWithTaggedStruct(method, path, request)
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request ListMultipartUploadsRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// ListMultipartUploadsResponse wrapper for the ListMultipartUploads operation
type ListMultipartUploadsResponse struct {
// The underlying http response
RawResponse *http.Response
// A list of []MultipartUpload instances
Items []MultipartUpload `presentIn:"body"`
<|fim▁hole|>
// Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular
// request, provide this request ID.
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
// Paginating a list of multipart uploads.
// In the GET request, set the limit to the number of multipart uploads that you want returned in the response.
// If the opc-next-page header appears in the response, then this is a partial list and there are
// additional multipart uploads to get. Include the header's value as the `page` parameter in the subsequent
// GET request to get the next batch of objects. Repeat this process to retrieve the entire list of
// multipart uploads.
OpcNextPage *string `presentIn:"header" name:"opc-next-page"`
}
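// Illustrative pagination sketch following the OpcNextPage contract described
// above (the client, ctx, namespace and bucket values are assumptions):
//
//	req := ListMultipartUploadsRequest{NamespaceName: &namespace, BucketName: &bucket}
//	for {
//		resp, err := client.ListMultipartUploads(ctx, req)
//		if err != nil {
//			break
//		}
//		// ... consume resp.Items ...
//		if resp.OpcNextPage == nil {
//			break
//		}
//		req.Page = resp.OpcNextPage
//	}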
func (response ListMultipartUploadsResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface
func (response ListMultipartUploadsResponse) HTTPResponse() *http.Response {
return response.RawResponse
}<|fim▁end|> | // Echoes back the value passed in the opc-client-request-id header, for use by clients when debugging.
OpcClientRequestId *string `presentIn:"header" name:"opc-client-request-id"` |
<|file_name|>ctl_gateway.rs<|end_file_name|><|fim▁begin|>//! Utilities for generating and reading the self-signed certificate for use with the control
//! gateway.
use crate::{crypto::keys::NamedRevision,
tls::rustls_wrapper::{self,
Error as RustlsReadersError}};
use rcgen::{Certificate as RcgenCertificate,
CertificateParams,
DistinguishedName,
DnType,
RcgenError,
PKCS_ECDSA_P256_SHA256};
use rustls::{Certificate,
PrivateKey,
RootCertStore};
use std::{fs::{self,
File},
io::{Error as IoError,
Write},
path::{Path,
PathBuf}};
use thiserror::Error;
use webpki::DnsNameRef;
const NAME_PREFIX: &str = "ctl-gateway";
const CRT_EXTENSION: &str = "crt.pem";
const KEY_EXTENSION: &str = "key.pem";
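// Illustrative usage of the helpers below (a sketch; the directory and DNS
// name are assumptions):
//
//     let dns = DnsNameRef::try_from_ascii_str("my-supervisor").unwrap();
//     generate_self_signed_certificate_and_key(dns, "/hab/cache/keys")?;
//     let certs = latest_certificates("/hab/cache/keys")?;
//     let key = latest_private_key("/hab/cache/keys")?;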
<|fim▁hole|> FailedToMatchPattern(String),
#[error("ctl gateway TLS file lookup failed, err: {0}")]
RustlsReaders(#[from] RustlsReadersError),
#[error("ctl gateway TLS file generation failed, err: {0}")]
CertificateGeneration(#[from] RcgenError),
#[error("writing the ctl gateway TLS files failed, err: {0}")]
CertificateWrite(#[from] IoError),
}
pub fn generate_self_signed_certificate_and_key(subject_alternate_name: DnsNameRef,
path: impl AsRef<Path>)
-> Result<(), Error> {
let mut params =
CertificateParams::new(vec![Into::<&str>::into(subject_alternate_name).to_string(),
"localhost".to_string(),]);
let mut distinguished_name = DistinguishedName::new();
distinguished_name.push(DnType::OrganizationName,
"Habitat Supervisor Control Gateway");
params.distinguished_name = distinguished_name;
params.alg = &PKCS_ECDSA_P256_SHA256;
let certificate = RcgenCertificate::from_params(params)?;
let crt = certificate.serialize_pem()?;
let key = certificate.serialize_private_key_pem();
fs::create_dir_all(&path)?;
let named_revision = NamedRevision::new(NAME_PREFIX.to_string());
let crt_path = path.as_ref()
.join(format!("{}.{}", named_revision, CRT_EXTENSION));
let mut crt_file = File::create(crt_path)?;
crt_file.write_all(crt.as_bytes())?;
let key_path = path.as_ref()
.join(format!("{}.{}", named_revision, KEY_EXTENSION));
let mut key_file = File::create(key_path)?;
key_file.write_all(key.as_bytes())?;
Ok(())
}
/// Search for files in `search_directory` that match `file_pattern` and return the last match
fn get_last_path(search_directory: impl AsRef<Path>, file_pattern: &str) -> Result<PathBuf, Error> {
let pattern = search_directory.as_ref().join(file_pattern);
let pattern = pattern.to_string_lossy();
glob::glob(&pattern).expect("valid pattern")
.filter_map(std::result::Result::ok)
.filter(|p| p.metadata().map(|m| m.is_file()).unwrap_or(false))
.max()
.ok_or_else(|| Error::FailedToMatchPattern(pattern.to_string()))
}
pub fn latest_certificates(path: impl AsRef<Path>) -> Result<Vec<Certificate>, Error> {
let path = get_last_path(path, &format!("{}-*.{}", NAME_PREFIX, CRT_EXTENSION))?;
Ok(rustls_wrapper::certificates_from_file(&path)?)
}
pub fn latest_private_key(path: impl AsRef<Path>) -> Result<PrivateKey, Error> {
let path = get_last_path(path, &format!("{}-*.{}", NAME_PREFIX, KEY_EXTENSION))?;
Ok(rustls_wrapper::private_key_from_file(&path)?)
}
pub fn latest_root_certificate_store(path: impl AsRef<Path>) -> Result<RootCertStore, Error> {
let path = get_last_path(path, &format!("{}-*.{}", NAME_PREFIX, CRT_EXTENSION))?;
Ok(rustls_wrapper::root_certificate_store_from_file(&path)?)
}
#[cfg(test)]
mod tests {
use super::*;
use std::{fs,
time::Duration};
use tempfile::TempDir;
use webpki::DnsNameRef;
#[test]
fn ctl_gateway_generate_and_read_tls_files() {
let tmpdir = TempDir::new().unwrap();
generate_self_signed_certificate_and_key(DnsNameRef::try_from_ascii_str("a_test_domain").unwrap(), &tmpdir).unwrap();
assert_eq!(fs::read_dir(&tmpdir).unwrap().count(), 2);
let first_path =
get_last_path(&tmpdir, &format!("{}-*.{}", NAME_PREFIX, CRT_EXTENSION)).unwrap();
let certificates = latest_certificates(&tmpdir).unwrap();
assert_eq!(certificates.len(), 1);
latest_private_key(&tmpdir).unwrap();
let root_certificate_store = latest_root_certificate_store(&tmpdir).unwrap();
assert_eq!(root_certificate_store.roots.len(), 1);
// TLS files are named on second boundaries. Wait enough time to guarantee we get a new
// name.
std::thread::sleep(Duration::from_secs(2));
generate_self_signed_certificate_and_key(DnsNameRef::try_from_ascii_str("another_domain").unwrap(), &tmpdir).unwrap();
assert_eq!(fs::read_dir(&tmpdir).unwrap().count(), 4);
let second_path =
get_last_path(&tmpdir, &format!("{}-*.{}", NAME_PREFIX, CRT_EXTENSION)).unwrap();
let certificates = latest_certificates(&tmpdir).unwrap();
assert_eq!(certificates.len(), 1);
latest_private_key(&tmpdir).unwrap();
let root_certificate_store = latest_root_certificate_store(&tmpdir).unwrap();
assert_eq!(root_certificate_store.roots.len(), 1);
assert!(second_path > first_path);
}
}<|fim▁end|> | #[derive(Error, Debug)]
pub enum Error {
#[error("ctl gateway TLS file lookup failed when trying to match files {0}")] |
<|file_name|>EtsyProduct_20170624130727.js<|end_file_name|><|fim▁begin|>import React from 'react'
import '../../styles/gkm-item.scss';
import ProductItem from './ProductItem'
import Input from '../Common/Input'
import SearchBar from '../SearchBar';
import _ from 'lodash'
class EtsyProduct extends React.Component {
constructor(props, context) {
super(props, context);
this.editables = {
name: {
max: 20,
display: 'Name'
},
shortDescription: {
max: 100,
display: 'Short Description'
},
twitterTitle: {
max: 100,
display: 'Twitter Text'
}
};
}
onUpdateField(update) {
console.log(update)
let product = this.props.product
_.each(update, (v, k) => {
product[k] = v;
})
this.props.updateItem(product);
}
getListingById(id) {
return _.find(this.props.listings, (l) => l.id === id)
}
editableFieldsHtml() {
const { product } = this.props
return (
<div>
{_.map(this.editables, (v, fld) => (
<Input
key={fld}
title={v.display}
fld={fld}
value={product[fld]}
id={product.id}
onUpdate={
(i, update) => this.onUpdateField(update)
}
/>))
}
</div>
)
}
onAddtoCategory(category) {
// addItemtoCategory is assumed to be passed in via props, like updateItem above.
const { product, addItemtoCategory } = this.props;
addItemtoCategory(product, category)
}
render() {
const { product, listings, addListingToProduct } = this.props;
const loader = (<div>loading...</div>)
const productItems = product.populatedListings.map((listing) => (listing ? <ProductItem key={listing.id} product={listing} inProduct={true} /> : loader))
return (
<div className='gkm-etsy-product' id={product.id}>
<h5>Etsy Product</h5>
<div>
Product Name: {product.name}
<div>Categories:</div>
{_.map(product.hashtags.all(), (hashtag) => (<div key={hashtag}>{hashtag}</div>))}
<Input title="add category" fld='category' resetOnClick={true} button={{ text: 'ADD!', action: (category) => this.onAddtoCategory(category) }} />
{this.editableFieldsHtml()}
</div>
{productItems}
<div> Add another listing: </div>
<SearchBar products={listings}
onSelect={
(listingId) => {
addListingToProduct(product, this.getListingById(listingId))
}
} />
</div>
)
}
}
/*
Item.propTypes = {
};
*/
<|fim▁hole|>export default EtsyProduct<|fim▁end|> | |
<|file_name|>ProbesListSetupAction.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2009--2010 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.<|fim▁hole|> * Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package com.redhat.rhn.frontend.action.systems.monitoring;
import com.redhat.rhn.domain.server.Server;
import com.redhat.rhn.frontend.struts.RequestContext;
import com.redhat.rhn.frontend.struts.RhnAction;
import com.redhat.rhn.frontend.taglibs.list.helper.ListHelper;
import com.redhat.rhn.frontend.taglibs.list.helper.Listable;
import com.redhat.rhn.manager.monitoring.MonitoringManager;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* ProbesListSetupAction
* @version $Rev: 59372 $
*/
public class ProbesListSetupAction extends RhnAction implements Listable {
/**
*
* {@inheritDoc}
*/
public ActionForward execute(ActionMapping mapping,
ActionForm formIn,
HttpServletRequest request,
HttpServletResponse response) {
ListHelper helper = new ListHelper(this, request);
helper.execute();
RequestContext requestContext = new RequestContext(request);
Server server = requestContext.lookupAndBindServer();
request.setAttribute("sid", server.getId());
return mapping.findForward("default");
}
/**
*
* {@inheritDoc}
*/
public List getResult(RequestContext rctx) {
Server server = rctx.lookupAndBindServer();
return MonitoringManager.getInstance().
probesForSystem(rctx.getCurrentUser(), server, null);
}
}<|fim▁end|> | * |
<|file_name|>submenu_test.js<|end_file_name|><|fim▁begin|>// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.provide('goog.ui.SubMenuTest');
goog.setTestOnly('goog.ui.SubMenuTest');
goog.require('goog.a11y.aria');
goog.require('goog.a11y.aria.State');
goog.require('goog.dom');
goog.require('goog.dom.classlist');
goog.require('goog.events');
goog.require('goog.events.Event');
goog.require('goog.events.KeyCodes');
goog.require('goog.events.KeyHandler');
goog.require('goog.functions');
goog.require('goog.positioning');
goog.require('goog.positioning.Overflow');
goog.require('goog.style');
goog.require('goog.testing.MockClock');
goog.require('goog.testing.events');
goog.require('goog.testing.jsunit');
goog.require('goog.ui.Component');
goog.require('goog.ui.Menu');
goog.require('goog.ui.MenuItem');
goog.require('goog.ui.SubMenu');
goog.require('goog.ui.SubMenuRenderer');
var menu;
var clonedMenuDom;
var mockClock;
// mock out goog.positioning.positionAtCoordinate so that
// the menu always fits. (we don't care about testing the
// dynamic menu positioning if the menu doesn't fit in the window.)
var oldPositionFn = goog.positioning.positionAtCoordinate;
goog.positioning.positionAtCoordinate = function(
absolutePos, movableElement, movableElementCorner, opt_margin,
opt_overflow) {
return oldPositionFn.call(
null, absolutePos, movableElement, movableElementCorner, opt_margin,
goog.positioning.Overflow.IGNORE);
};
function setUp() {
clonedMenuDom = goog.dom.getElement('demoMenu').cloneNode(true);
menu = new goog.ui.Menu();
}
function tearDown() {
document.body.style.direction = 'ltr';
menu.dispose();
var element = goog.dom.getElement('demoMenu');
element.parentNode.replaceChild(clonedMenuDom, element);
goog.dom.removeChildren(goog.dom.getElement('sandbox'));
if (mockClock) {
mockClock.uninstall();
mockClock = null;
}
}
function assertKeyHandlingIsCorrect(keyToOpenSubMenu, keyToCloseSubMenu) {
menu.setFocusable(true);
menu.decorate(goog.dom.getElement('demoMenu'));
var KeyCodes = goog.events.KeyCodes;
var plainItem = menu.getChildAt(0);
plainItem.setMnemonic(KeyCodes.F);
var subMenuItem1 = menu.getChildAt(1);
subMenuItem1.setMnemonic(KeyCodes.S);
var subMenuItem1Menu = subMenuItem1.getMenu();
menu.setHighlighted(plainItem);
var fireKeySequence = goog.testing.events.fireKeySequence;
assertTrue(
'Expected OpenSubMenu key to not be handled',
fireKeySequence(plainItem.getElement(), keyToOpenSubMenu));
assertFalse(subMenuItem1Menu.isVisible());
assertFalse(
'Expected F key to be handled',
fireKeySequence(plainItem.getElement(), KeyCodes.F));
assertFalse(
'Expected DOWN key to be handled',
fireKeySequence(plainItem.getElement(), KeyCodes.DOWN));
assertEquals(subMenuItem1, menu.getChildAt(1));
assertFalse(
'Expected OpenSubMenu key to be handled',
fireKeySequence(subMenuItem1.getElement(), keyToOpenSubMenu));
assertTrue(subMenuItem1Menu.isVisible());
assertFalse(
'Expected CloseSubMenu key to be handled',
fireKeySequence(subMenuItem1.getElement(), keyToCloseSubMenu));
assertFalse(subMenuItem1Menu.isVisible());
assertFalse(
'Expected UP key to be handled',
fireKeySequence(subMenuItem1.getElement(), KeyCodes.UP));
assertFalse(
'Expected S key to be handled',
fireKeySequence(plainItem.getElement(), KeyCodes.S));
assertTrue(subMenuItem1Menu.isVisible());
}
function testKeyHandling_ltr() {
assertKeyHandlingIsCorrect(
goog.events.KeyCodes.RIGHT, goog.events.KeyCodes.LEFT);
}
function testKeyHandling_rtl() {
document.body.style.direction = 'rtl';
assertKeyHandlingIsCorrect(
goog.events.KeyCodes.LEFT, goog.events.KeyCodes.RIGHT);
}
function testNormalLtrSubMenu() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
assertArrowDirection(subMenu, false);
assertRenderDirection(subMenu, false);
assertArrowPosition(subMenu, false);
}
function testNormalRtlSubMenu() {
document.body.style.direction = 'rtl';
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
assertArrowDirection(subMenu, true);
assertRenderDirection(subMenu, true);
assertArrowPosition(subMenu, true);
}
function testLtrSubMenuAlignedToStart() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setAlignToEnd(false);
assertArrowDirection(subMenu, true);
assertRenderDirection(subMenu, true);
assertArrowPosition(subMenu, false);
}
function testNullContentElement() {
var subMenu = new goog.ui.SubMenu();
subMenu.setContent('demo');
}
function testRtlSubMenuAlignedToStart() {
document.body.style.direction = 'rtl';
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setAlignToEnd(false);
assertArrowDirection(subMenu, false);
assertRenderDirection(subMenu, false);
assertArrowPosition(subMenu, true);
}
function testSetContentKeepsArrow_ltr() {
document.body.style.direction = 'ltr';
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setAlignToEnd(false);
subMenu.setContent('test');
assertArrowDirection(subMenu, true);
assertRenderDirection(subMenu, true);
assertArrowPosition(subMenu, false);
}
function testSetContentKeepsArrow_rtl() {
document.body.style.direction = 'rtl';
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setAlignToEnd(false);
subMenu.setContent('test');
assertArrowDirection(subMenu, false);
assertRenderDirection(subMenu, false);
assertArrowPosition(subMenu, true);
}
function testExitDocument() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
var innerMenu = subMenu.getMenu();
assertTrue('Top-level menu was not in document', menu.isInDocument());
assertTrue('Submenu was not in document', subMenu.isInDocument());
assertTrue('Inner menu was not in document', innerMenu.isInDocument());
menu.exitDocument();
assertFalse('Top-level menu was in document', menu.isInDocument());
assertFalse('Submenu was in document', subMenu.isInDocument());
assertFalse('Inner menu was in document', innerMenu.isInDocument());
}
function testDisposal() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
var innerMenu = subMenu.getMenu();
menu.dispose();
assert('Top-level menu was not disposed', menu.getDisposed());
assert('Submenu was not disposed', subMenu.getDisposed());
assert('Inner menu was not disposed', innerMenu.getDisposed());
}
function testShowAndDismissSubMenu() {
var openEventDispatched = false;
var closeEventDispatched = false;
function handleEvent(e) {
switch (e.type) {
case goog.ui.Component.EventType.OPEN:
openEventDispatched = true;
break;
case goog.ui.Component.EventType.CLOSE:
closeEventDispatched = true;
break;
default:
fail('Invalid event type: ' + e.type);
}
}
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setHighlighted(true);
goog.events.listen(
subMenu,
[goog.ui.Component.EventType.OPEN, goog.ui.Component.EventType.CLOSE],
handleEvent);
assertFalse(
'Submenu must not have "-open" CSS class',
goog.dom.classlist.contains(subMenu.getElement(), 'goog-submenu-open'));
assertFalse('Popup menu must not be visible', subMenu.getMenu().isVisible());
assertFalse('No OPEN event must have been dispatched', openEventDispatched);
assertFalse('No CLOSE event must have been dispatched', closeEventDispatched);
subMenu.showSubMenu();
assertTrue(
'Submenu must have "-open" CSS class',
goog.dom.classlist.contains(subMenu.getElement(), 'goog-submenu-open'));
assertTrue('Popup menu must be visible', subMenu.getMenu().isVisible());
assertTrue('OPEN event must have been dispatched', openEventDispatched);
assertFalse('No CLOSE event must have been dispatched', closeEventDispatched);
subMenu.dismissSubMenu();
assertFalse(
'Submenu must not have "-open" CSS class',
goog.dom.classlist.contains(subMenu.getElement(), 'goog-submenu-open'));
assertFalse('Popup menu must not be visible', subMenu.getMenu().isVisible());
assertTrue('CLOSE event must have been dispatched', closeEventDispatched);
goog.events.unlisten(
subMenu,
[goog.ui.Component.EventType.OPEN, goog.ui.Component.EventType.CLOSE],
handleEvent);
}
function testDismissWhenSubMenuNotVisible() {
var openEventDispatched = false;
var closeEventDispatched = false;
function handleEvent(e) {
switch (e.type) {
case goog.ui.Component.EventType.OPEN:
openEventDispatched = true;
break;
case goog.ui.Component.EventType.CLOSE:
closeEventDispatched = true;
break;
default:
fail('Invalid event type: ' + e.type);
}
}
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setHighlighted(true);
goog.events.listen(
subMenu,
[goog.ui.Component.EventType.OPEN, goog.ui.Component.EventType.CLOSE],
handleEvent);
assertFalse(
'Submenu must not have "-open" CSS class',
goog.dom.classlist.contains(subMenu.getElement(), 'goog-submenu-open'));
assertFalse('Popup menu must not be visible', subMenu.getMenu().isVisible());
assertFalse('No OPEN event must have been dispatched', openEventDispatched);
assertFalse('No CLOSE event must have been dispatched', closeEventDispatched);
subMenu.showSubMenu();
subMenu.getMenu().setVisible(false);
subMenu.dismissSubMenu();
assertFalse(
'Submenu must not have "-open" CSS class',
goog.dom.classlist.contains(subMenu.getElement(), 'goog-submenu-open'));
assertFalse(subMenu.menuIsVisible_);
assertFalse('Popup menu must not be visible', subMenu.getMenu().isVisible());
assertTrue('CLOSE event must have been dispatched', closeEventDispatched);
goog.events.unlisten(
subMenu,
[goog.ui.Component.EventType.OPEN, goog.ui.Component.EventType.CLOSE],
handleEvent);
}
function testCloseSubMenuBehavior() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.getElement().id = 'subMenu';
var innerMenu = subMenu.getMenu();
innerMenu.getChildAt(0).getElement().id = 'child1';
subMenu.setHighlighted(true);
subMenu.showSubMenu();
function MyFakeEvent(keyCode, opt_eventType) {
this.type = opt_eventType || goog.events.KeyHandler.EventType.KEY;
this.keyCode = keyCode;
this.propagationStopped = false;
this.preventDefault = goog.nullFunction;
this.stopPropagation = function() { this.propagationStopped = true; };
}
// Focus on the first item in the submenu and verify the activedescendant is
// set correctly.
subMenu.handleKeyEvent(new MyFakeEvent(goog.events.KeyCodes.DOWN));
assertEquals(
'First item in submenu must be the aria-activedescendant', 'child1',
goog.a11y.aria.getState(
menu.getElement(), goog.a11y.aria.State.ACTIVEDESCENDANT));
// Dismiss the submenu and verify the activedescendant is updated correctly.
subMenu.handleKeyEvent(new MyFakeEvent(goog.events.KeyCodes.LEFT));
assertEquals(
'Submenu must be the aria-activedescendant', 'subMenu',
goog.a11y.aria.getState(
menu.getElement(), goog.a11y.aria.State.ACTIVEDESCENDANT));
}
function testLazyInstantiateSubMenu() {
menu.decorate(goog.dom.getElement('demoMenu'));
var subMenu = menu.getChildAt(1);
subMenu.setHighlighted(true);
var lazyMenu;
var key = goog.events.listen(
subMenu, goog.ui.Component.EventType.OPEN, function(e) {
lazyMenu = new goog.ui.Menu();
lazyMenu.addItem(new goog.ui.MenuItem('foo'));
lazyMenu.addItem(new goog.ui.MenuItem('bar'));
subMenu.setMenu(lazyMenu, /* opt_internal */ false);
});
subMenu.showSubMenu();
assertNotNull('Popup menu must have been created', lazyMenu);
assertEquals(
'Popup menu must be a child of the submenu', subMenu,
lazyMenu.getParent());
assertTrue('Popup menu must have been rendered', lazyMenu.isInDocument());
assertTrue('Popup menu must be visible', lazyMenu.isVisible());
menu.dispose();
assertTrue('Submenu must have been disposed of', subMenu.isDisposed());
assertFalse(
'Popup menu must not have been disposed of', lazyMenu.isDisposed());
lazyMenu.dispose();
goog.events.unlistenByKey(key);
}
function testReusableMenu() {
var subMenuOne = new goog.ui.SubMenu('SubMenu One');
var subMenuTwo = new goog.ui.SubMenu('SubMenu Two');
menu.addItem(subMenuOne);
menu.addItem(subMenuTwo);
menu.render(goog.dom.getElement('sandbox'));
// It is possible for the same popup menu to be shared between different
// submenus.
var sharedMenu = new goog.ui.Menu();
sharedMenu.addItem(new goog.ui.MenuItem('Hello'));
sharedMenu.addItem(new goog.ui.MenuItem('World'));
assertNull('Shared menu must not have a parent', sharedMenu.getParent());
subMenuOne.setMenu(sharedMenu);
assertEquals(
'SubMenuOne must point to the shared menu', sharedMenu,
subMenuOne.getMenu());
assertEquals(
'SubMenuOne must be the shared menu\'s parent', subMenuOne,
sharedMenu.getParent());
subMenuTwo.setMenu(sharedMenu);
assertEquals(
'SubMenuTwo must point to the shared menu', sharedMenu,
subMenuTwo.getMenu());
assertEquals(
'SubMenuTwo must be the shared menu\'s parent', subMenuTwo,
sharedMenu.getParent());
assertEquals(
'SubMenuOne must still point to the shared menu', sharedMenu,
subMenuOne.getMenu());
menu.setHighlighted(subMenuOne);
subMenuOne.showSubMenu();
assertEquals(
'SubMenuOne must point to the shared menu', sharedMenu,
subMenuOne.getMenu());
assertEquals(
'SubMenuOne must be the shared menu\'s parent', subMenuOne,
sharedMenu.getParent());
assertEquals(
'SubMenuTwo must still point to the shared menu', sharedMenu,
subMenuTwo.getMenu());
assertTrue('Shared menu must be visible', sharedMenu.isVisible());
menu.setHighlighted(subMenuTwo);
subMenuTwo.showSubMenu();
assertEquals(
'SubMenuTwo must point to the shared menu', sharedMenu,
subMenuTwo.getMenu());
assertEquals(
'SubMenuTwo must be the shared menu\'s parent', subMenuTwo,
sharedMenu.getParent());
assertEquals(
'SubMenuOne must still point to the shared menu', sharedMenu,
subMenuOne.getMenu());
assertTrue('Shared menu must be visible', sharedMenu.isVisible());
}
/**
* If you remove a submenu in the interval between when a mouseover event
* is fired on it, and showSubMenu() is called, showSubMenu causes a null
* value to be dereferenced. This test validates that the fix for this works.
* (See bug 1823144).
*/
function testDeleteItemDuringSubmenuDisplayInterval() {
mockClock = new goog.testing.MockClock(true);
var submenu = new goog.ui.SubMenu('submenu');
submenu.addItem(new goog.ui.MenuItem('submenu item 1'));
menu.addItem(submenu);
// Trigger mouseover, and remove item before showSubMenu can be called.
var e = new goog.events.Event();
submenu.handleMouseOver(e);
menu.removeItem(submenu);
mockClock.tick(goog.ui.SubMenu.MENU_DELAY_MS);
// (No JS error should occur.)
}
function testShowSubMenuAfterRemoval() {
var submenu = new goog.ui.SubMenu('submenu');
menu.addItem(submenu);
menu.removeItem(submenu);
submenu.showSubMenu();
// (No JS error should occur.)
}
/**
* Tests that if a sub menu is selectable, then it can handle actions.
*/
function testSubmenuSelectable() {
var submenu = new goog.ui.SubMenu('submenu');
submenu.addItem(new goog.ui.MenuItem('submenu item 1'));
menu.addItem(submenu);
submenu.setSelectable(true);
var numClicks = 0;
var menuClickedFn = function(e) { numClicks++; };
goog.events.listen(
submenu, goog.ui.Component.EventType.ACTION, menuClickedFn);
submenu.performActionInternal(null);
submenu.performActionInternal(null);
assertEquals('The submenu should have fired an event', 2, numClicks);
submenu.setSelectable(false);
submenu.performActionInternal(null);
assertEquals(
'The submenu should not have fired any further events', 2, numClicks);
}
/**
* Tests that if a sub menu is checkable, then it can handle actions.
*/
function testSubmenuCheckable() {
var submenu = new goog.ui.SubMenu('submenu');
submenu.addItem(new goog.ui.MenuItem('submenu item 1'));
menu.addItem(submenu);
submenu.setCheckable(true);
var numClicks = 0;
var menuClickedFn = function(e) { numClicks++; };
goog.events.listen(
submenu, goog.ui.Component.EventType.ACTION, menuClickedFn);
submenu.performActionInternal(null);
submenu.performActionInternal(null);
assertEquals('The submenu should have fired an event', 2, numClicks);
submenu.setCheckable(false);
submenu.performActionInternal(null);
assertEquals(
'The submenu should not have fired any further events', 2, numClicks);
}
/**
* Tests that entering a child menu cancels the dismiss timer for the submenu.
*/
function testEnteringChildCancelsDismiss() {
var submenu = new goog.ui.SubMenu('submenu');
submenu.isInDocument = goog.functions.TRUE;
submenu.addItem(new goog.ui.MenuItem('submenu item 1'));
menu.addItem(submenu);
mockClock = new goog.testing.MockClock(true);
submenu.getMenu().setVisible(true);
// This starts the dismiss timer.
submenu.setHighlighted(false);
// This should cancel the dismiss timer.
submenu.getMenu().dispatchEvent(goog.ui.Component.EventType.ENTER);
// Tick the length of the dismiss timer.
mockClock.tick(goog.ui.SubMenu.MENU_DELAY_MS);
// Check that the menu is now highlighted and still visible.
assertTrue(submenu.getMenu().isVisible());
assertTrue(submenu.isHighlighted());
}
/**
* Asserts that this sub menu renders in the right direction relative to
* the parent menu.
* @param {goog.ui.SubMenu} subMenu The sub menu.
* @param {boolean} left True for left-pointing, false for right-pointing.
*/
function assertRenderDirection(subMenu, left) {
subMenu.getParent().setHighlighted(subMenu);
subMenu.showSubMenu();
var menuItemPosition = goog.style.getPageOffset(subMenu.getElement());
var menuPosition = goog.style.getPageOffset(subMenu.getMenu().getElement());
assert(Math.abs(menuItemPosition.y - menuPosition.y) < 5);
assertEquals(
'Menu at: ' + menuPosition.x + ', submenu item at: ' + menuItemPosition.x,
left, menuPosition.x < menuItemPosition.x);
}
<|fim▁hole|>/**
* Asserts that this sub menu has a properly-oriented arrow.
* @param {goog.ui.SubMenu} subMenu The sub menu.
* @param {boolean} left True for left-pointing, false for right-pointing.
*/
function assertArrowDirection(subMenu, left) {
assertEquals(
left ? goog.ui.SubMenuRenderer.LEFT_ARROW_ :
goog.ui.SubMenuRenderer.RIGHT_ARROW_,
getArrowElement(subMenu).innerHTML);
}
/**
* Asserts that the arrow position is correct.
* @param {goog.ui.SubMenu} subMenu The sub menu.
* @param {boolean} left True for left-aligned, false for right-aligned.
*/
function assertArrowPosition(subMenu, left) {
var arrow = getArrowElement(subMenu);
var expectedLeft =
left ? 0 : arrow.offsetParent.offsetWidth - arrow.offsetWidth;
var actualLeft = arrow.offsetLeft;
assertTrue(
'Expected left offset: ' + expectedLeft + '\n' +
'Actual left offset: ' + actualLeft + '\n',
Math.abs(expectedLeft - actualLeft) < 5);
}
/**
* Gets the arrow element of a sub menu.
* @param {goog.ui.SubMenu} subMenu The sub menu.
* @return {Element} The arrow.
*/
function getArrowElement(subMenu) {
return subMenu.getContentElement().lastChild;
}<|fim▁end|> | |
<|file_name|>21e4e35689f6_bug_993786_update_crash_adu_by_build_.py<|end_file_name|><|fim▁begin|>"""bug 993786 - update_crash_adu_by_build_signature-bad-buildids
Revision ID: 21e4e35689f6<|fim▁hole|>"""
# revision identifiers, used by Alembic.
revision = '21e4e35689f6'
down_revision = '224f0fda6ecb'
from alembic import op
from socorrolib.lib import citexttype, jsontype, buildtype
from socorrolib.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
load_stored_proc(op, ['update_crash_adu_by_build_signature.sql'])
def downgrade():
load_stored_proc(op, ['update_crash_adu_by_build_signature.sql'])<|fim▁end|> | Revises: 224f0fda6ecb
Create Date: 2014-04-08 18:46:19.755028
|
<|file_name|>parse.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import re
import string
import sys
import os
USAGE = 'USAGE: parse.py <player.h> <playercore_casts.i> <playercore_arraysofclasses.i> <Jplayercore> <playercore> <player.java>'
if __name__ == '__main__':
if len(sys.argv) != 7:
print USAGE
sys.exit(-1)
infilename = sys.argv[1]
outfilename = sys.argv[2]
aofcfilename = sys.argv[3]
outdir = sys.argv[4]
pcoutdir = sys.argv[5]
pcjfilename = sys.argv[6]
os.system('mkdir -p ' + outdir)
os.system('mkdir -p ' + pcoutdir)
# Read in the entire file
infile = open(infilename, 'r')
instream = infile.read()
infile.close()
outfile = open(outfilename, 'w+')
aofcfile = open(aofcfilename, 'w+')
pcjfile = open(pcoutdir + '/' + pcjfilename, 'w+')
# strip C++-style comments
pattern = re.compile('//.*')
instream = pattern.sub('', instream)
# strip C-style comments
pattern = re.compile('/\*.*?\*/', re.MULTILINE | re.DOTALL)
instream = pattern.sub('', instream)
# strip blank lines
pattern = re.compile('^\s*?\n', re.MULTILINE)
instream = pattern.sub('', instream)
# find structs
pattern = re.compile('typedef\s+struct\s+player_\w+[^}]+\}[^;]+',
re.MULTILINE)
structs = pattern.findall(instream)
print 'Found ' + `len(structs)` + ' struct(s)'
contentspattern = re.compile('.*\{\s*(.*?)\s*\}', re.MULTILINE | re.DOTALL)
declpattern = re.compile('\s*([^;]*?;)', re.MULTILINE)
typepattern = re.compile('\s*\S+')
variablepattern = re.compile('\s*([^,;]+?)\s*[,;]')
#arraypattern = re.compile('\[\s*(\w*?)\s*\]')
arraypattern = re.compile('\[(.*?)\]')
outfile.write('%inline\n%{\n\n')
pcjfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
pcjfile.write('public class player {\n\n')
for s in structs:
# extract type of struct
split = string.split(s)
typename = split[-1]
# pick out the contents of the struct
varpart = contentspattern.findall(s)
if len(varpart) != 1:
print 'skipping nested / empty struct ' + typename
continue
# SWIG macro that lets us access arrays of this non-primitive type
# as Java arrays
aofcfile.write('JAVA_ARRAYSOFCLASSES(' + typename +')\n')
buf_to_name = 'buf_to_' + typename
buf_from_name = typename + '_to_buf'
buf_to_Jname = 'buf_to_J' + typename
buf_from_Jname = 'J' + typename + '_to_buf'
sizeof_name = typename + '_sizeof'
# function to return the size of the underlying C structure
outfile.write('size_t ' + sizeof_name + '(void)\n')
outfile.write('{\n')
outfile.write(' return(sizeof(' + typename + '));\n')
outfile.write('}\n')
# JNI cast from a void* to a pointer to this type
outfile.write(typename + '* ' + buf_to_name + '(void* buf)\n')
outfile.write('{\n')
outfile.write(' return((' + typename + '*)(buf));\n')
outfile.write('}\n')
# JNI cast from a pointer to this type to a void*
outfile.write('void* ' + buf_from_name + '(' + typename + '* msg)\n')
outfile.write('{\n')
outfile.write(' return((void*)(msg));\n')
outfile.write('}\n')
# Equivalent non-JNI Java class
jclass = 'J' + typename
jfile = open(outdir + '/' + jclass + '.java', 'w+')
jfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
jfile.write('import java.io.Serializable;\n')
jfile.write('public class ' + jclass + ' implements Serializable {\n')
jfile.write(' public final static long serialVersionUID = ' + `hash(s)` + 'L;\n')
jclass_constructor = ' public ' + jclass + '() {\n';
# Static method in class player to convert from JNI Java object to
# non-JNI java object
pcj_data_to_jdata = ''
pcj_data_to_jdata += ' public static ' + jclass + ' ' + typename + '_to_' + jclass + '(' + typename + ' data) {\n'
pcj_data_to_jdata += ' ' + jclass + ' Jdata = new ' + jclass + '();\n'
# Static method in class player to convert from non-JNI Java object to
# JNI java object
pcj_jdata_to_data = ''
pcj_jdata_to_data += ' public static ' + typename + ' ' + jclass + '_to_' + typename + '(' + jclass + ' Jdata) {\n'
pcj_jdata_to_data += ' ' + typename + ' data = new ' + typename + '();\n'
# Static method in class playercore to convert from SWIGTYPE_p_void
# to non-JNI Java object.
pcjfile.write(' public static ' + jclass + ' ' + buf_to_Jname + '(SWIGTYPE_p_void buf) {\n')
pcjfile.write(' ' + typename + ' data = playercore_java.' + buf_to_name + '(buf);\n')
pcjfile.write(' return(' + typename + '_to_' + jclass + '(data));\n')
pcjfile.write(' }\n\n')
# Static method in class playercore to convert non-JNI Java object to
# SWIGTYPE_p_void.
pcjfile.write(' public static SWIGTYPE_p_void ' + buf_from_Jname + '(' + jclass + ' Jdata) {\n')
pcjfile.write(' ' + typename + ' data = ' + jclass + '_to_' + typename + '(Jdata);\n')
pcjfile.write(' return(playercore_java.' + buf_from_name + '(data));\n')
pcjfile.write(' }\n\n')
# separate the variable declarations
decls = declpattern.finditer(varpart[0])
for d in decls:
# find the type and variable names in this declaration
dstring = d.string[d.start(1):d.end(1)]
type = typepattern.findall(dstring)[0]
dstring = typepattern.sub('', dstring, 1)
vars = variablepattern.finditer(dstring)
# Do some name mangling for common types
builtin_type = 1
if type == 'int64_t':
jtype = 'long'
elif type == 'uint64_t':
jtype = 'long'
elif type == 'int32_t':
jtype = 'int'
elif type == 'uint32_t':
jtype = 'long'
elif type == 'int16_t':
jtype = 'short'
elif type == 'uint16_t':
jtype = 'int'
elif type == 'int8_t':
jtype = 'byte'
elif type == 'uint8_t':
jtype = 'short'
elif type == 'char':
jtype = 'char'
elif type == 'bool_t':
jtype = 'boolean'
elif type == 'double':
jtype = 'double'
elif type == 'float':
jtype = 'float'
else:
# rely on a previous declaration of a J class for this type
jtype = 'J' + type
builtin_type = 0
# iterate through each variable
for var in vars:
varstring = var.string[var.start(1):var.end(1)]
# is it an array or a scalar?
arraysize = arraypattern.findall(varstring)
if len(arraysize) > 0:
arraysize = arraysize[0]<|fim▁hole|> jfile.write(' public ' + jtype + '[] ' + varstring + ';\n')
#if builtin_type == 0:
if jtype != 'char':
if arraysize.isdigit():
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[' + arraysize + '];\n'
else:
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[playercore_javaConstants.' + arraysize + '];\n'
else:
arraysize = ''
jfile.write(' public ' + jtype + ' ' + varstring + ';\n')
if builtin_type == 0:
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '();\n'
capvarstring = string.capitalize(varstring[0]) + varstring[1:]
if builtin_type:
pcj_data_to_jdata += ' Jdata.' + varstring + ' = data.get' + capvarstring + '();\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(Jdata.' + varstring +');\n'
else:
if arraysize == '':
pcj_data_to_jdata += ' Jdata.' + varstring + ' = ' + type + '_to_' + jtype + '(data.get' + capvarstring + '());\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(' + jtype + '_to_' + type + '(Jdata.' + varstring + '));\n'
else:
try:
asize = int(arraysize)
except:
arraysize = 'playercore_javaConstants.' + arraysize
pcj_data_to_jdata += ' {\n'
pcj_data_to_jdata += ' ' + type + ' foo[] = data.get' + capvarstring + '();\n'
pcj_data_to_jdata += ' for(int i=0;i<' + arraysize + ';i++)\n'
pcj_data_to_jdata += ' Jdata.' + varstring + '[i] = ' + type + '_to_' + jtype + '(foo[i]);\n'
pcj_data_to_jdata += ' }\n'
pcj_jdata_to_data += ' {\n'
pcj_jdata_to_data += ' ' + type + ' foo[] = new ' + type + '[' + arraysize + '];\n'
pcj_jdata_to_data += ' for(int i=0;i<' + arraysize + ';i++)\n'
pcj_jdata_to_data += ' foo[i] = ' + jtype + '_to_' + type + '(Jdata.' + varstring + '[i]);\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(foo);\n'
pcj_jdata_to_data += ' }\n'
pcj_data_to_jdata += ' return(Jdata);\n'
pcj_data_to_jdata += ' }\n\n'
pcjfile.write(pcj_data_to_jdata)
pcj_jdata_to_data += ' return(data);\n'
pcj_jdata_to_data += ' }\n\n'
pcjfile.write(pcj_jdata_to_data)
jclass_constructor += ' }\n'
jfile.write(jclass_constructor)
jfile.write('}\n')
jfile.close()
outfile.write('\n%}\n')
outfile.close()
pcjfile.write('\n}\n')
pcjfile.close()
aofcfile.close()<|fim▁end|> | varstring = arraypattern.sub('', varstring)
if jtype == 'char':
jfile.write(' public String ' + varstring + ';\n')
else: |
<|file_name|>strsync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# strsync - Automatically translate and synchronize .strings files from defined base language.
# Copyright (c) 2015 metasmile [email protected] (github.com/metasmile)
from __future__ import print_function
import strparser, strparser_intentdefinition, strlocale, strtrans
import time, os, sys, argparse, codecs, csv
from os.path import expanduser
from fuzzywuzzy import fuzz
from colorama import init
from colorama import Fore, Back, Style
import unicodedata2
init(autoreset=True)
def len_unicode(ustr):
return len(unicodedata2.normalize('NFC', ustr.decode('utf-8')))
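# Illustrative example (Python 2 semantics, as assumed throughout this file):
# a decomposed "e" + combining acute, i.e. the UTF-8 bytes 'e\xcc\x81',
# normalizes to one code point under NFC, so len_unicode('e\xcc\x81') == 1
# while len('e\xcc\x81'.decode('utf-8')) == 2.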
def resolve_file_path(file):
return os.path.join(os.path.dirname(__file__), file)
def join_path_all(target_dir, target_files):
return map(lambda f: os.path.join(target_dir, f), target_files)
def rget(dictionary, key):
items = []
if key in dictionary:
items.append(dictionary[key])
for dict_value in [value for value in dictionary.values() if isinstance(value, dict)]:
items += rget(dict_value, key)
return items
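# Illustrative example: rget({'a': 1, 'b': {'a': 2}}, 'a') returns [1, 2],
# collecting the value bound to `key` at every nesting level.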
def main():
parser = argparse.ArgumentParser(
description='Automatically translate and synchronize .strings files from defined base language.')
parser.add_argument('-b', '--base-lang-name',
help='A base(or source) localizable resource name.(default=\'Base\'), (e.g. "Base" via \'Base.lproj\', "en" via \'en.lproj\')',
default='Base', required=False)
parser.add_argument('-x', '--excluding-lang-names', type=str,
help='A localizable resource name that you want to exclude. (e.g. "Base" via \'Base.lproj\', "en" via \'en.lproj\')',
default=[], required=False, nargs='+')
parser.add_argument('-f', '--force-translate-keys', type=str,
help='Keys in the strings to update and translate by force. (input nothing for all keys.)',
default=[], required=False, nargs='*')
    parser.add_argument('-o', '--following-base-keys', type=str, help='Keys in the strings to follow from "Base".',
default=[], required=False, nargs='+')
    parser.add_argument('-w', '--following-base-if-not-exists', type=str, help='With this option, all keys will be followed up with base values if they do not exist.',
default=None, required=False, nargs='*')
parser.add_argument('-l', '--cutting-length-ratio-with-base', type=float,
                        help='A float ratio compared against the length of "Base"; translated values longer than len(Base) * ratio are replaced with the Base value.',
default=[], required=False, nargs='+')
parser.add_argument('-c', '--ignore-comments', help='Allows ignoring comment synchronization.', default=None,
required=False, nargs='*')
parser.add_argument('-v', '--verify-results', help='Verify translated results via reversed results', default=None,
required=False, nargs='*')
parser.add_argument('-s', '--include-secondary-languages', help='Include Additional Secondary Languages. (+63 language codes)', default=None,
required=False, nargs='*')
parser.add_argument('-i', '--ignore-unverified-results',
help='Allows ignoring unverified results when appending them.', default=None, required=False,
nargs='*')
parser.add_argument('target path', help='Target localization resource path. (root path of Base.lproj, default=./)',
default='./', nargs='?')
    parser.add_argument('only for keys', help='Specific keys for exclusive work. All operations will apply only to those keys; other keys will be ignored. Not specified by default. (default=None)',
default=None, nargs='*')
args = vars(parser.parse_args())
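    # Typical invocation (illustrative; assumes this module is exposed as a
    # `strsync` console script):
    #   strsync -b en -x zh-Hans ./MyApp/Resources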
reload(sys)
sys.setdefaultencoding('utf-8')
# configure arguments
__LOCALE_XCODE_BASE_LOWERCASE__ = 'base'
__DIR_SUFFIX__ = ".lproj"
__FILE_SUFFIX__ = ".strings"
__FILE_INTENT_SUFFIX__ = ".intentdefinition"
__FILE_DICT_SUFFIX__ = ".stringsdict"
__RESOURCE_PATH__ = expanduser(args['target path'])
__ONLY_FOR_KEYS__ = args['only for keys']
__BASE_LANG__ = args['base_lang_name']
__EXCLUDING_LANGS__ = args['excluding_lang_names']
__KEYS_FORCE_TRANSLATE__ = args['force_translate_keys']
__KEYS_FORCE_TRANSLATE_ALL__ = ('--force-translate-keys' in sys.argv or '-f' in sys.argv) and not __KEYS_FORCE_TRANSLATE__
__KEYS_FOLLOW_BASE__ = args['following_base_keys']
__CUTTING_LENGTH_RATIO__ = (args['cutting_length_ratio_with_base'] or [0])[0]
__FOLLOWING_ALL_KEYS_IFNOT_EXIST__ = args['following_base_if_not_exists'] is not None
__IGNORE_COMMENTS__ = args['ignore_comments'] is not None
__IGNORE_UNVERIFIED_RESULTS__ = args['ignore_unverified_results'] is not None
__RATIO_TO_IGNORE_UNVERIFIED_RESULTS__ = int(
args['ignore_unverified_results'][0]) if __IGNORE_UNVERIFIED_RESULTS__ and len(
args['ignore_unverified_results']) else 0
__VERIFY_TRANS_RESULTS__ = __IGNORE_UNVERIFIED_RESULTS__ or args['verify_results'] is not None
__INCLUDE_SECONDARY_LANGUAGES__ = args['include_secondary_languages'] is not None
# Locale settings
# [language designator] en, fr
# [language designator]_[region designator] en_GB, zh_HK
# [language designator]-[script designator] az-Arab, zh-Hans
# [language designator]-[script designator]_[region designator] zh-Hans_HK
print('(i) Initializing for supported languages ...')
__lang_codes = strlocale.default_supporting_xcode_lang_codes()
if __INCLUDE_SECONDARY_LANGUAGES__:
__lang_codes += strlocale.secondary_supporting_xcode_lang_codes()
__XCODE_LPROJ_SUPPORTED_LOCALES_MAP__ = strlocale.map_locale_codes(__lang_codes, strtrans.supported_locale_codes())
__XCODE_LPROJ_SUPPORTED_LOCALES__ = __XCODE_LPROJ_SUPPORTED_LOCALES_MAP__.keys()
print(Fore.WHITE + '(i) Supported numbers of locale code :', str(len(__XCODE_LPROJ_SUPPORTED_LOCALES__)),
Style.RESET_ALL)
print(__XCODE_LPROJ_SUPPORTED_LOCALES__)
# handle base
if __BASE_LANG__.endswith(__DIR_SUFFIX__):
__BASE_RESOUCE_DIR__ = __BASE_LANG__
__BASE_LANG__ = __BASE_LANG__.split(__DIR_SUFFIX__)[0]
else:
__BASE_RESOUCE_DIR__ = __BASE_LANG__ + __DIR_SUFFIX__
if not __BASE_LANG__.lower() == __LOCALE_XCODE_BASE_LOWERCASE__:
__BASE_LANG__ = strlocale.lang(__BASE_LANG__)
# setup Translator & langs
# read ios langs
print(Fore.WHITE + '(i) Fetching supported locale codes for ios9 ...', Style.RESET_ALL)
__IOS9_CODES__ = [lang_row[0] for lang_row in
csv.reader(open(resolve_file_path('lc_ios9.tsv'), 'rb'), delimiter='\t')]
print(Fore.WHITE + '(i) Supported numbers of locale code :', len(__IOS9_CODES__), Style.RESET_ALL)
global_result_logs = {}
def merge_two_dicts(x, y):
'''Given two dicts, merge them into a new dict as a shallow copy.'''
z = x.copy()
z.update(y)
return z
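    # e.g. merge_two_dicts({'a': 1}, {'a': 2, 'b': 3}) -> {'a': 2, 'b': 3};
    # values from the second dict win on key collisions.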
# core function
def synchronize(target_file, lc): #add,remove, update (translate or copy from base)
# parse target file
target_kv = {}
target_kc = {}
target_error_lines = []
if not notexist_or_empty_file(target_file):
parsed_strings = strparser.parse_strings(filename=target_file)
for item in parsed_strings:
k, e = item['key'], item['error']
# line error
if e:
target_error_lines.append(e)
if not target_error_lines:
target_kv[k] = item['value']
target_kc[k] = item['comment']
# parsing complete or return.
if target_error_lines:
print('(!) Syntax error - Skip')
return False, None, None, target_error_lines
# base
base_content = base_dict[os.path.basename(target_file)]
base_kv = {}
base_kc = {}
for item in base_content:
k, e = item['key'], item['error']
# line error
if e:
print('(!) WARNING : Syntax error from Base -> ', k, ':', e)
base_kv[k] = item['value']
base_kc[k] = item['comment']
force_adding_keys = base_kv.keys() if __KEYS_FORCE_TRANSLATE_ALL__ else __KEYS_FORCE_TRANSLATE__
adding_keys = list(
((set(base_kv.keys()) - set(target_kv.keys())) | (set(base_kv.keys()) & set(force_adding_keys))) \
- set(base_kv.keys() if __FOLLOWING_ALL_KEYS_IFNOT_EXIST__ else __KEYS_FOLLOW_BASE__) \
)
removing_keys = list(set(target_kv.keys()) - set(base_kv.keys()))
existing_keys = list(set(base_kv.keys()) - (set(adding_keys) | set(removing_keys)))
# Filter if __ONLY_FOR_KEYS__ option activated
if __ONLY_FOR_KEYS__:
adding_keys = list(set(adding_keys) & set(__ONLY_FOR_KEYS__))
removing_keys = list(set(removing_keys) & set(__ONLY_FOR_KEYS__))
existing_keys = list(set(existing_keys) & set(__ONLY_FOR_KEYS__))
updated_keys = []
"""
        perform translation
"""
translated_kv = {}
reversed_matched_kv = {} # {"ratio":float, "ignored":True|False}
reversed_translated_kv = {}
if len(adding_keys):
print('Translating...')
translated_kv = dict(zip(adding_keys, strtrans.translate_strs([base_kv[k] for k in adding_keys], lc)))
if __VERIFY_TRANS_RESULTS__:
print('Reversing results and matching...')
reversed_translated_kv = dict(
zip(adding_keys, strtrans.translate_strs([translated_kv[_ak] for _ak in adding_keys], 'en')))
for bk in adding_keys:
if bk in reversed_translated_kv:
ratio = fuzz.partial_ratio(base_kv[bk], reversed_translated_kv[bk])
should_ignore = __IGNORE_UNVERIFIED_RESULTS__ and ratio <= __RATIO_TO_IGNORE_UNVERIFIED_RESULTS__
if should_ignore:
translated_kv[bk] = base_kv[bk] # copy from base set
reversed_matched_kv[bk] = {"ratio": ratio, "ignored": should_ignore}
updated_content = []
for item in base_content:
k = item['key']
newitem = dict.fromkeys(item.keys())
newitem['key'] = k
target_value, target_comment = target_kv.get(k), target_kc.get(k)
newitem['value'] = target_value or item['value']
newitem['comment'] = target_comment if __IGNORE_COMMENTS__ else target_comment or base_kc[k]
needs_update_comment = False if __IGNORE_COMMENTS__ else not target_comment and base_kc[k]
# added
if k in adding_keys:
if k in translated_kv:
newitem['value'] = translated_kv[k]
if not newitem['comment']:
newitem['comment'] = 'Translated from: {0}'.format(base_kv[k])
reversed_matched_msg = ''
if k in reversed_matched_kv:
reversed_matched_msg = Fore.CYAN + "({}% Matched{}: \'{}\' <- \'{}\' <- \'{}\')".format(
reversed_matched_kv[k]["ratio"],
", So ignored [X]" if reversed_matched_kv[k]["ignored"] else "", reversed_translated_kv[k],
newitem['value'], base_kv[k]) + Style.RESET_ALL
print('[Add] "{0}" = "{1}" <- {2}'.format(k, newitem['value'], base_kv[k]), reversed_matched_msg)
else:
newitem['value'] = target_kv[k]
if not newitem['comment']:
                        newitem['comment'] = 'Translation failed from: {0}'.format(base_kv[k])
print(Fore.RED + '[Error] "{0}" = "{1}" X <- {2}'.format(k, newitem['value'],
base_kv[k]) + Style.RESET_ALL)
# exists
elif k in existing_keys:
if k != "Base" and __CUTTING_LENGTH_RATIO__>0:
if target_value != base_kv[k] \
and len_unicode(target_value) > float(len_unicode(base_kv[k]))*__CUTTING_LENGTH_RATIO__ \
or needs_update_comment:
print(Fore.YELLOW + '(!) Length of "', target_value, '" is longer than"', base_kv[k], '" as',
len(target_value), '>', len(base_kv[k]), Style.RESET_ALL)
newitem['value'] = base_kv[k]
updated_keys.append(k)
if not lc in global_result_logs:
global_result_logs[lc] = {}
global_result_logs[lc][k] = (target_value, base_kv[k])
else:
newitem['value'] = target_value or base_kv[k]
elif k in __KEYS_FOLLOW_BASE__:
newitem['value'] = base_kv[k]
if target_value != base_kv[k] or needs_update_comment:
updated_keys.append(k)
else:
newitem['value'] = target_value or base_kv[k]
if not target_value or needs_update_comment:
updated_keys.append(k)
updated_content.append(newitem)
# removed or wrong
for k in removing_keys:
print(Fore.RED + '[Remove]', k, Style.RESET_ALL)
if len(adding_keys) or len(updated_keys) or len(removing_keys):
print(Fore.WHITE + '(i) Changed Keys: Added {0}, Updated {1}, Removed {2}'.format(len(adding_keys),
len(updated_keys),
len(removing_keys)),
Style.RESET_ALL)
# check verification failed items
target_verified_items = None
if len(reversed_matched_kv):
target_verified_items = {
k: {'ratio': reversed_matched_kv[k]["ratio"], 'original': base_kv[k],
'reversed': reversed_translated_kv[k],
'translated': translated_kv[k]} for k in reversed_matched_kv.keys()}
return updated_content and (len(adding_keys) > 0 or len(updated_keys) > 0 or len(
removing_keys) > 0), updated_content, translated_kv, target_error_lines, target_verified_items
def write_file(target_file, parsed_list):
suc = False
try:
f = codecs.open(target_file, "w", "utf-8")
contents = ''
for content in parsed_list:
if content['comment']:
contents += '/*{0}*/'.format(content['comment']) + '\n'
contents += '"{0}" = "{1}";'.format(content['key'], content['value']) + '\n'
f.write(contents)
suc = True
except IOError:
            print('IOError opening', target_file)
finally:
f.close()
return suc
def remove_file(target_file):
try:
os.rename(target_file, target_file + '.deleted')
return True
except IOError:
            print('IOError renaming', target_file)
return False
def create_file(target_file):
open(target_file, 'a').close()
def notexist_or_empty_file(target_file):
return not os.path.exists(target_file) or os.path.getsize(target_file) == 0
def resolve_file_names(target_file_names):
return map(lambda f: f.decode('utf-8'), filter(lambda f: f.endswith(__FILE_SUFFIX__) or f.endswith(__FILE_INTENT_SUFFIX__), target_file_names))
base_dict = {}
results_dict = {}
# Get Base Language Specs
walked = list(os.walk(__RESOURCE_PATH__, topdown=True))
# Init with Base.lproj
for dir, subdirs, files in walked:
if os.path.basename(dir) == __BASE_RESOUCE_DIR__:
for _file in resolve_file_names(files):
f = os.path.join(dir, _file)
if notexist_or_empty_file(f):
continue
parsed_objs = None
# parse .strings
if f.endswith(__FILE_SUFFIX__):
parsed_objs = strparser.parse_strings(filename=f)
# parse .intentdefinition
elif f.endswith(__FILE_INTENT_SUFFIX__):
print('[i] Found "{0}" in {1}. Parse ....'.format(os.path.basename(f), __BASE_RESOUCE_DIR__))
parsed_objs = strparser_intentdefinition.parse_strings(f)
                    # replace with the destination extension .strings
_file = _file.replace(__FILE_INTENT_SUFFIX__, __FILE_SUFFIX__)
# write original .strings file to local
write_file(os.path.join(dir, _file), parsed_objs)
if not parsed_objs:
continue
base_dict[_file] = parsed_objs
if not base_dict:
print('[!] Not found "{0}" in target path "{1}"'.format(__BASE_RESOUCE_DIR__, __RESOURCE_PATH__))
sys.exit(0)
# Exist or Create supporting lproj dirs.
    print('Check and verify resources ...')
current_lproj_names = [os.path.splitext(os.path.basename(lproj_path))[0] for lproj_path in
filter(lambda d: d.endswith(__DIR_SUFFIX__), [dir for dir, subdirs, files in walked])]
notexisted_lproj_names = list(set(__XCODE_LPROJ_SUPPORTED_LOCALES__) - set(current_lproj_names))
creating_lproj_dirs = [expanduser(os.path.join(__RESOURCE_PATH__, ln + __DIR_SUFFIX__)) for ln in
notexisted_lproj_names]
if creating_lproj_dirs:
        print('The following lproj dirs do not exist. Creating ...')
for d in creating_lproj_dirs:
print('Created', d)
os.mkdir(d)
# Start to sync localizable files.
print('Start synchronizing...')
for file in base_dict:
print('Target:', file)
for dir, subdirs, files in walked:
files = resolve_file_names(files)
if dir.endswith((__DIR_SUFFIX__)):
lproj_name = os.path.basename(dir).split(__DIR_SUFFIX__)[0]
if lproj_name == __BASE_LANG__:
continue
if not lproj_name in __XCODE_LPROJ_SUPPORTED_LOCALES_MAP__:
                    print('Not supported: ', lproj_name)
continue
lc = __XCODE_LPROJ_SUPPORTED_LOCALES_MAP__[lproj_name]
if strlocale.matched_locale_code(lc, __EXCLUDING_LANGS__):
print('Skip: ', lc)
continue
results_dict[lc] = {
'deleted_files': [],
'added_files': [],
'updated_files': [],
'skipped_files': [],
'translated_files_lines': {},
'error_lines_kv': {},
'verified_result': {}
}
# if not supported_lang(lc):
# print('Does not supported: ', lc)
# results_dict[lc]['skipped_files'] = join_path_all(dir, files)
# continue
                print('\n', 'Analyzing localizables... {1} (at {0})'.format(dir, lc))
added_files = list(set(base_dict.keys()) - set(files))
removed_files = list(set(files) - set(base_dict.keys()))
existing_files = list(set(files) - (set(added_files) | set(removed_files)))
added_files = join_path_all(dir, added_files)
removed_files = join_path_all(dir, removed_files)
existing_files = join_path_all(dir, existing_files)
added_cnt, updated_cnt, removed_cnt = 0, 0, 0
translated_files_lines = results_dict[lc]['translated_files_lines']
error_files = results_dict[lc]['error_lines_kv']
<|fim▁hole|> removed_cnt += 1
# add - file
for added_file in added_files:
print('Adding File... {0}'.format(added_file))
create_file(added_file)
u, c, t, e, m = synchronize(added_file, lc)
# error
if e:
error_files[added_file] = e
# normal
elif u and write_file(added_file, c):
added_cnt += 1
translated_files_lines[added_file] = t
# verify failed
for k in (m or {}):
results_dict[lc]['verified_result'][k] = m[k]
# exist - lookup lines
for ext_file in existing_files:
u, c, t, e, m = synchronize(ext_file, lc)
# error
if e:
error_files[ext_file] = e
# normal
elif u:
print('Updating File... {0}'.format(ext_file))
if write_file(ext_file, c):
                        updated_cnt += 1
translated_files_lines[ext_file] = t
# verify failed
for k in (m or {}):
results_dict[lc]['verified_result'][k] = m[k]
if added_cnt or updated_cnt or removed_cnt or error_files:
print(Fore.WHITE + '(i) Changed Files : Added {0}, Updated {1}, Removed {2}, Error {3}'.format(
added_cnt, updated_cnt, removed_cnt, len(error_files.keys())), Style.RESET_ALL)
else:
print('Nothing to translate or add.')
"""
Results
"""
results_dict[lc]['deleted_files'] = removed_files
results_dict[lc]['added_files'] = list(set(added_files) & set(translated_files_lines.keys()))
results_dict[lc]['updated_files'] = list(set(existing_files) & set(translated_files_lines.keys()))
if error_files:
print(error_files)
results_dict[lc]['error_lines_kv'] = error_files
# print(total Results)
print('')
t_file_cnt = \
t_line_cnt = \
        file_add_cnt = \
file_remove_cnt = \
file_update_cnt = \
file_skip_cnt = \
0
for lc in results_dict.keys():
result_lc = results_dict[lc]
file_add_cnt += len(result_lc['added_files'])
file_remove_cnt += len(result_lc['deleted_files'])
file_update_cnt += len(result_lc['updated_files'])
file_skip_cnt += len(result_lc['skipped_files'])
for f in result_lc['added_files']: print('Added', f)
for f in result_lc['deleted_files']: print('Removed', f)
for f in result_lc['updated_files']: print('Updated', f)
        for f in result_lc['skipped_files']: print('Skipped', f)
tfiles = result_lc['translated_files_lines']
if tfiles:
# print('============ Results for langcode : {0} ============='.format(lc))
for f in tfiles:
t_file_cnt += 1
if len(tfiles[f]):
# print('', f)
for key in tfiles[f]:
t_line_cnt += 1
# print(key, ' = ', tfiles[f][key])
for lc in global_result_logs.keys():
print(lc)
for t in global_result_logs[lc].keys():
o, b = global_result_logs[lc][t]
print(o.decode('utf-8'), ' -> ', b)
print('')
# WARN
found_warining = filter(lambda i: i or None, rget(results_dict, 'error_lines_kv'))
if found_warining:
print(
            Fore.YELLOW + '\n[!] WARNING: Found strings that contain syntax errors. Please confirm.' + Style.RESET_ALL)
for a in found_warining:
for k in a:
print('at', k)
for i in a[k]:
print(' ', i)
# VERIFY FAILED
verified_results = filter(lambda i: i or None, rget(results_dict, 'verified_result'))
if verified_results and len(verified_results):
print(
Fore.GREEN + '\n[i] VERIFIED RESULTS: Matched ratio via reversed translation results. Please confirm.' + Style.RESET_ALL)
for lc in results_dict:
print(lc)
vr = results_dict[lc]['verified_result']
for k in vr:
vd = vr[k]
status_msg = Fore.RED + '(Ignored) ' + Style.RESET_ALL if __IGNORE_UNVERIFIED_RESULTS__ and vd[
'ratio'] <= __RATIO_TO_IGNORE_UNVERIFIED_RESULTS__ else ''
print(' {}{}: {} -> {} -> {}, Matched: {}%'.format(status_msg, k
, vd['original']
, vd['translated']
, vd['reversed']
, str(vd['ratio'])))
print('')
if file_add_cnt or file_update_cnt or file_remove_cnt or file_skip_cnt:
print('Total New Translated Strings : {0}'.format(t_line_cnt))
print('Changed Files Total : Added {0}, Updated {1}, Removed {2}, Skipped {3}'.format(file_add_cnt,
file_update_cnt,
file_remove_cnt,
file_skip_cnt))
print("Synchronized.")
else:
print("All strings are already synchronized. Nothing to translate or add.")
return<|fim▁end|> | # remove - file
for removed_file in removed_files:
print('Removing File... {0}'.format(removed_file))
if remove_file(removed_file): |
<|file_name|>FindFragment.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2016 JustWayward Team
* <p><|fim▁hole|> * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.justwayward.reader.ui.fragment;
import android.content.Intent;
import android.net.Uri;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import com.justwayward.reader.R;
import com.justwayward.reader.base.BaseFragment;
import com.justwayward.reader.bean.support.FindBean;
import com.justwayward.reader.common.OnRvItemClickListener;
import com.justwayward.reader.component.AppComponent;
import com.justwayward.reader.ui.activity.SubjectBookListActivity;
import com.justwayward.reader.ui.activity.TopCategoryListActivity;
import com.justwayward.reader.ui.activity.TopRankActivity;
import com.justwayward.reader.ui.adapter.FindAdapter;
import com.justwayward.reader.view.SupportDividerItemDecoration;
import java.util.ArrayList;
import java.util.List;
import butterknife.Bind;
/**
 * Discover (发现)
*
* @author yuyh.
* @date 16/9/1.
*/
public class FindFragment extends BaseFragment implements OnRvItemClickListener<FindBean> {
@Bind(R.id.recyclerview)
RecyclerView mRecyclerView;
private FindAdapter mAdapter;
private List<FindBean> mList = new ArrayList<>();
@Override
public int getLayoutResId() {
return R.layout.fragment_find;
}
@Override
public void initDatas() {
mList.clear();
mList.add(new FindBean("排行榜", R.drawable.home_find_rank));
mList.add(new FindBean("主题书单", R.drawable.home_find_topic));
mList.add(new FindBean("分类", R.drawable.home_find_category));
mList.add(new FindBean("官方QQ群", R.drawable.home_find_listen));
}
@Override
public void configViews() {
mRecyclerView.setHasFixedSize(true);
mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
mRecyclerView.addItemDecoration(new SupportDividerItemDecoration(mContext, LinearLayoutManager.VERTICAL, true));
mAdapter = new FindAdapter(mContext, mList, this);
mRecyclerView.setAdapter(mAdapter);
}
@Override
protected void setupActivityComponent(AppComponent appComponent) {
}
@Override
public void attachView() {
}
@Override
public void onItemClick(View view, int position, FindBean data) {
switch (position) {
case 0:
TopRankActivity.startActivity(activity);
break;
case 1:
SubjectBookListActivity.startActivity(activity);
break;
case 2:
startActivity(new Intent(activity, TopCategoryListActivity.class));
break;
case 3:
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://jq.qq.com/?_wv=1027&k=46qbql8")));
break;
default:
break;
}
}
}<|fim▁end|> | * Licensed under the Apache License, Version 2.0 (the "License"); |
<|file_name|>protocol.go<|end_file_name|><|fim▁begin|>package liverpc
import "encoding/json"
const (<|fim▁hole|>
_cmdReqType = byte('0')
)
type protoHeader struct {
magic uint32
timestamp uint32
checkSum uint32
version uint32
reserved uint32
seq uint32
length uint32
cmd []byte
}
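// Note (inferred from the fields above together with the _headerLen
// constant): the seven uint32 fields occupy 28 bytes, leaving 32 bytes of
// the 60-byte header for the command name carried in cmd.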
type protoReq struct {
Header protoHeader
Body []byte
}
type protoResp struct {
Header protoHeader
Body []byte
}
// Args is the argument envelope sent with an RPC request.
type Args struct {
Header *Header `json:"header"`
Body interface{} `json:"body"`
HTTP interface{} `json:"http"`
}
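// Illustrative use (a sketch; Header is the per-call metadata type defined
// elsewhere in this package):
//
//	args := &Args{
//		Header: &Header{},
//		Body:   map[string]string{"room_id": "1"},
//	}
//	payload, _ := json.Marshal(args)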
// Reply is the generic envelope for an RPC response.
type Reply struct {
Code int `json:"code"`
Message string `json:"msg"`
Data json.RawMessage `json:"data"`
}<|fim▁end|> | _magic = 2233
_headerLen = 60 |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use std::process::Command;
static AFL_SRC_PATH: &str = "AFLplusplus";
<|fim▁hole|>// https://github.com/rust-fuzz/afl.rs/issues/148
#[cfg(target_os = "macos")]
static AR_CMD: &str = "/usr/bin/ar";
#[cfg(not(target_os = "macos"))]
static AR_CMD: &str = "ar";
#[path = "src/common.rs"]
mod common;
fn main() {
build_afl(&common::afl_dir());
build_afl_llvm_runtime();
}
fn build_afl(out_dir: &Path) {
let mut command = Command::new("make");
command
.current_dir(AFL_SRC_PATH)
.args(&["clean", "all", "install"])
// skip the checks for the legacy x86 afl-gcc compiler
.env("AFL_NO_X86", "1")
// build just the runtime to avoid troubles with Xcode clang on macOS
.env("NO_BUILD", "1")
.env("DESTDIR", out_dir)
.env("PREFIX", "");
let status = command.status().expect("could not run 'make'");
assert!(status.success());
}
fn build_afl_llvm_runtime() {
std::fs::copy(
Path::new(&AFL_SRC_PATH).join("afl-compiler-rt.o"),
common::object_file_path(),
)
.expect("Couldn't copy object file");
let status = Command::new(AR_CMD)
.arg("r")
.arg(common::archive_file_path())
.arg(common::object_file_path())
.status()
.expect("could not run 'ar'");
assert!(status.success());
}<|fim▁end|> | |
<|file_name|>compositor_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Communication with the compositor thread.
use SendableFrameTree;
use compositor::CompositingReason;
use euclid::point::Point2D;
use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::{Key, KeyModifiers, KeyState, PipelineId};
use net_traits::image::base::Image;
use profile_traits::mem;
use profile_traits::time;
use script_traits::{AnimationState, ConstellationMsg, EventResult};
use std::fmt::{Debug, Error, Formatter};
use std::sync::mpsc::{Receiver, Sender};
use style_traits::cursor::Cursor;
use style_traits::viewport::ViewportConstraints;
use url::Url;
use webrender;
use webrender_traits;
/// Sends messages to the compositor. This is a trait supplied by the port because the method used
/// to communicate with the compositor may have to kick OS event loops awake, communicate cross-
/// process, and so forth.
pub trait CompositorProxy : 'static + Send {
/// Sends a message to the compositor.
fn send(&self, msg: Msg);
/// Clones the compositor proxy.
fn clone_compositor_proxy(&self) -> Box<CompositorProxy + 'static + Send>;
}
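// A minimal sketch (not part of Servo) of a proxy backed by a plain
// `std::sync::mpsc::Sender`; a real embedding would additionally wake its
// OS event loop inside `send` so the compositor notices the message:
//
//     struct ChannelCompositorProxy { sender: Sender<Msg> }
//
//     impl CompositorProxy for ChannelCompositorProxy {
//         fn send(&self, msg: Msg) { let _ = self.sender.send(msg); }
//         fn clone_compositor_proxy(&self) -> Box<CompositorProxy + 'static + Send> {
//             Box::new(ChannelCompositorProxy { sender: self.sender.clone() })
//         }
//     }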
/// The port that the compositor receives messages on. As above, this is a trait supplied by the
/// Servo port.
pub trait CompositorReceiver : 'static {
/// Receives the next message inbound for the compositor. This must not block.
fn try_recv_compositor_msg(&mut self) -> Option<Msg>;
/// Synchronously waits for, and returns, the next message inbound for the compositor.
fn recv_compositor_msg(&mut self) -> Msg;
}
/// A convenience implementation of `CompositorReceiver` for a plain old Rust `Receiver`.
impl CompositorReceiver for Receiver<Msg> {
fn try_recv_compositor_msg(&mut self) -> Option<Msg> {
self.try_recv().ok()
}
fn recv_compositor_msg(&mut self) -> Msg {
self.recv().unwrap()
}
}
pub trait RenderListener {
fn recomposite(&mut self, reason: CompositingReason);
}
impl RenderListener for Box<CompositorProxy + 'static> {
fn recomposite(&mut self, reason: CompositingReason) {
self.send(Msg::Recomposite(reason));
}
}
/// Messages from the painting thread and the constellation thread to the compositor thread.
pub enum Msg {
/// Requests that the compositor shut down.
Exit,
/// Informs the compositor that the constellation has completed shutdown.
/// Required because the constellation can have pending calls to make
/// (e.g. SetFrameTree) at the time that we send it an ExitMsg.
ShutdownComplete,
/// Scroll a page in a window
ScrollFragmentPoint(PipelineId, Point2D<f32>, bool),
/// Alerts the compositor that the current page has changed its title.
ChangePageTitle(PipelineId, Option<String>),
/// Alerts the compositor that the current page has changed its URL.
ChangePageUrl(PipelineId, Url),
/// Alerts the compositor that the given pipeline has changed whether it is running animations.<|fim▁hole|> ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Replaces the current frame tree, typically called during main frame navigation.
SetFrameTree(SendableFrameTree, IpcSender<()>),
/// The load of a page has begun: (can go back, can go forward).
LoadStart(bool, bool),
/// The load of a page has completed: (can go back, can go forward, is root frame).
LoadComplete(bool, bool, bool),
/// We hit the delayed composition timeout. (See `delayed_composition.rs`.)
DelayedCompositionTimeout(u64),
/// Composite.
Recomposite(CompositingReason),
/// Sends an unconsumed key event back to the compositor.
KeyEvent(Option<char>, Key, KeyState, KeyModifiers),
/// Script has handled a touch event, and either prevented or allowed default actions.
TouchEventProcessed(EventResult),
/// Changes the cursor.
SetCursor(Cursor),
/// Composite to a PNG file and return the Image over a passed channel.
CreatePng(IpcSender<Option<Image>>),
/// Alerts the compositor that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
/// A reply to the compositor asking if the output image is stable.
IsReadyToSaveImageReply(bool),
/// A favicon was detected
NewFavicon(Url),
/// <head> tag finished parsing
HeadParsed,
/// Collect memory reports and send them back to the given mem::ReportsChan.
CollectMemoryReports(mem::ReportsChan),
/// A status message to be displayed by the browser chrome.
Status(Option<String>),
/// Get Window Informations size and position
GetClientWindow(IpcSender<(Size2D<u32>, Point2D<i32>)>),
/// Move the window to a point
MoveTo(Point2D<i32>),
/// Resize the window to size
ResizeTo(Size2D<u32>),
/// Pipeline visibility changed
PipelineVisibilityChanged(PipelineId, bool),
/// WebRender has successfully processed a scroll. The boolean specifies whether a composite is
/// needed.
NewScrollFrameReady(bool),
/// A pipeline was shut down.
// This message acts as a synchronization point between the constellation,
// when it shuts down a pipeline, to the compositor; when the compositor
// sends a reply on the IpcSender, the constellation knows it's safe to
// tear down the other threads associated with this pipeline.
PipelineExited(PipelineId, IpcSender<()>),
/// Runs a closure in the compositor thread.
/// It's used to dispatch functions from webrender to the main thread's event loop.
/// Required to allow WGL GLContext sharing in Windows.
Dispatch(Box<Fn() + Send>)
}
impl Debug for Msg {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match *self {
Msg::Exit => write!(f, "Exit"),
Msg::ShutdownComplete => write!(f, "ShutdownComplete"),
Msg::ScrollFragmentPoint(..) => write!(f, "ScrollFragmentPoint"),
Msg::ChangeRunningAnimationsState(..) => write!(f, "ChangeRunningAnimationsState"),
Msg::ChangePageTitle(..) => write!(f, "ChangePageTitle"),
Msg::ChangePageUrl(..) => write!(f, "ChangePageUrl"),
Msg::SetFrameTree(..) => write!(f, "SetFrameTree"),
Msg::LoadComplete(..) => write!(f, "LoadComplete"),
Msg::LoadStart(..) => write!(f, "LoadStart"),
Msg::DelayedCompositionTimeout(..) => write!(f, "DelayedCompositionTimeout"),
Msg::Recomposite(..) => write!(f, "Recomposite"),
Msg::KeyEvent(..) => write!(f, "KeyEvent"),
Msg::TouchEventProcessed(..) => write!(f, "TouchEventProcessed"),
Msg::SetCursor(..) => write!(f, "SetCursor"),
Msg::CreatePng(..) => write!(f, "CreatePng"),
Msg::ViewportConstrained(..) => write!(f, "ViewportConstrained"),
Msg::IsReadyToSaveImageReply(..) => write!(f, "IsReadyToSaveImageReply"),
Msg::NewFavicon(..) => write!(f, "NewFavicon"),
Msg::HeadParsed => write!(f, "HeadParsed"),
Msg::CollectMemoryReports(..) => write!(f, "CollectMemoryReports"),
Msg::Status(..) => write!(f, "Status"),
Msg::GetClientWindow(..) => write!(f, "GetClientWindow"),
Msg::MoveTo(..) => write!(f, "MoveTo"),
Msg::ResizeTo(..) => write!(f, "ResizeTo"),
Msg::PipelineVisibilityChanged(..) => write!(f, "PipelineVisibilityChanged"),
Msg::PipelineExited(..) => write!(f, "PipelineExited"),
Msg::NewScrollFrameReady(..) => write!(f, "NewScrollFrameReady"),
Msg::Dispatch(..) => write!(f, "Dispatch"),
}
}
}
/// Data used to construct a compositor.
pub struct InitialCompositorState {
/// A channel to the compositor.
pub sender: Box<CompositorProxy + Send>,
/// A port on which messages inbound to the compositor can be received.
pub receiver: Box<CompositorReceiver>,
/// A channel to the constellation.
pub constellation_chan: Sender<ConstellationMsg>,
/// A channel to the time profiler thread.
pub time_profiler_chan: time::ProfilerChan,
/// A channel to the memory profiler thread.
pub mem_profiler_chan: mem::ProfilerChan,
/// Instance of webrender API
pub webrender: webrender::Renderer,
pub webrender_api_sender: webrender_traits::RenderApiSender,
}<|fim▁end|> | |
<|file_name|>silverlight_attach.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2012, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
if(!dojo._hasResource["dojox.gfx.silverlight_attach"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dojox.gfx.silverlight_attach"] = true;
dojo.provide("dojox.gfx.silverlight_attach");
dojo.require("dojox.gfx.silverlight");
dojo.experimental("dojox.gfx.silverlight_attach");
(function(){
var g = dojox.gfx, sl = g.silverlight;
sl.attachNode = function(node){
// summary: creates a shape from a Node
// node: Node: an Silverlight node
return null; // not implemented
};
sl.attachSurface = function(node){
// summary: creates a surface from a Node
// node: Node: an Silverlight node
return null; // dojox.gfx.Surface
};
})();<|fim▁hole|>
}<|fim▁end|> | |
<|file_name|>unwind-box.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32
extern mod extra;
use std::task;
fn f() {
let a = @0;
fail!();
}
pub fn main() {
task::spawn_unlinked(f);
}<|fim▁end|> | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Django Diário documentation build configuration file, created by
# sphinx-quickstart v0.5.2.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('../examples'))
from basic_blog import settings
from django.core.management import setup_environ
setup_environ(settings)
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Django Diário'
copyright = u'2009, Guilherme Gondim and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.<|fim▁hole|>
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoDiariodoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'DjangoDirio.tex', ur'Django Diário Documentation',
ur'Guilherme Gondim and contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/dev': None}<|fim▁end|> | #html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False |
<|file_name|>sets.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import logging
from pymatgen.core import Molecule
from pymatgen.io.qchem_io.inputs import QCInput
from pymatgen.io.qchem_io.utils import lower_and_check_unique
# Classes for reading/manipulating/writing QChem input files.
__author__ = "Samuel Blau, Brandon Wood, Shyam Dwaraknath"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
logger = logging.getLogger(__name__)
class QChemDictSet(QCInput):
"""
Build a QCInput given all the various input parameters. Can be extended by standard implementations below.
"""
def __init__(self,
molecule,
job_type,
basis_set,
scf_algorithm,
dft_rung=4,
pcm_dielectric=None,
max_scf_cycles=200,
geom_opt_max_cycles=200,
overwrite_inputs=None):
"""
Args:
molecule (Pymatgen molecule object)
job_type (str)
basis_set (str)
scf_algorithm (str)
dft_rung (int)
pcm_dielectric (str)
max_scf_cycles (int)<|fim▁hole|> overwrite_inputs (dict): This is dictionary of QChem input sections to add or overwrite variables,
the available sections are currently rem, pcm, and solvent. So the accepted keys are rem, pcm, or solvent
and the value is a dictionary of key value pairs relevant to the section. An example would be adding a
new variable to the rem section that sets symmetry to false.
ex. overwrite_inputs = {"rem": {"symmetry": "false"}}
***It should be noted that if something like basis is added to the rem dict it will overwrite
the default basis.***
"""
self.molecule = molecule
self.job_type = job_type
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.dft_rung = dft_rung
self.pcm_dielectric = pcm_dielectric
self.max_scf_cycles = max_scf_cycles
self.geom_opt_max_cycles = geom_opt_max_cycles
self.overwrite_inputs = overwrite_inputs
pcm_defaults = {
"heavypoints": "194",
"hpoints": "194",
"radii": "uff",
"theory": "cpcm",
"vdwscale": "1.1"
}
mypcm = {}
mysolvent = {}
myrem = {}
myrem["job_type"] = job_type
myrem["basis"] = self.basis_set
myrem["max_scf_cycles"] = self.max_scf_cycles
myrem["gen_scfman"] = "true"
myrem["scf_algorithm"] = self.scf_algorithm
if self.dft_rung == 1:
myrem["exchange"] = "B3LYP"
elif self.dft_rung == 2:
myrem["method"] = "B97-D3"
myrem["dft_D"] = "D3_BJ"
elif self.dft_rung == 3:
myrem["method"] = "B97M-rV"
elif self.dft_rung == 4:
myrem["method"] = "wb97xd"
elif self.dft_rung == 5:
myrem["method"] = "wB97M-V"
else:
raise ValueError("dft_rung should be between 1 and 5!")
if self.job_type.lower() == "opt":
myrem["geom_opt_max_cycles"] = self.geom_opt_max_cycles
        if self.pcm_dielectric is not None:
mypcm = pcm_defaults
mysolvent["dielectric"] = self.pcm_dielectric
myrem["solvent_method"] = 'pcm'
if self.overwrite_inputs:
for sec, sec_dict in self.overwrite_inputs.items():
if sec == "rem":
temp_rem = lower_and_check_unique(sec_dict)
for k, v in temp_rem.items():
myrem[k] = v
if sec == "pcm":
temp_pcm = lower_and_check_unique(sec_dict)
for k, v in temp_pcm.items():
mypcm[k] = v
if sec == "solvent":
temp_solvent = lower_and_check_unique(sec_dict)
for k, v in temp_solvent.items():
mysolvent[k] = v
super(QChemDictSet, self).__init__(
self.molecule, rem=myrem, pcm=mypcm, solvent=mysolvent)
class OptSet(QChemDictSet):
"""
QChemDictSet for a geometry optimization
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
geom_opt_max_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
self.geom_opt_max_cycles = geom_opt_max_cycles
super(OptSet, self).__init__(
molecule=molecule,
job_type="opt",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
geom_opt_max_cycles=self.geom_opt_max_cycles,
overwrite_inputs=overwrite_inputs)
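# Illustrative usage (a sketch, not from the original module; `mol` is assumed
# to be a pymatgen Molecule and QCInput is assumed to expose write_file):
#   opt_set = OptSet(mol, pcm_dielectric=78.4)
#   opt_set.write_file("mol.qin")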
class SinglePointSet(QChemDictSet):
"""
QChemDictSet for a single point calculation
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
super(SinglePointSet, self).__init__(
molecule=molecule,
job_type="sp",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
overwrite_inputs=overwrite_inputs)
class FreqSet(QChemDictSet):
"""
QChemDictSet for a single point calculation
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
super(FreqSet, self).__init__(
molecule=molecule,
job_type="freq",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
overwrite_inputs=overwrite_inputs)<|fim▁end|> | geom_opt_max_cycles (int) |
<|file_name|>aws_codecommit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Shuang Wang <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: aws_codecommit
version_added: "2.8"
short_description: Manage repositories in AWS CodeCommit
description:
- Supports creation and deletion of CodeCommit repositories.
- See U(https://aws.amazon.com/codecommit/) for more information about CodeCommit.
author: Shuang Wang (@ptux)
requirements:
- botocore
- boto3
- python >= 2.6
options:
name:
description:
- name of repository.
required: true
comment:
description:
- description or comment of repository.
required: false
state:
description:
- Specifies the state of repository.
required: true
choices: [ 'present', 'absent' ]
extends_documentation_fragment:
- aws
- ec2
'''
RETURN = '''
repository_metadata:
description: "Information about the repository."
returned: always
type: complex
contains:
account_id:
description: "The ID of the AWS account associated with the repository."
returned: when state is present
type: str
sample: "268342293637"
arn:
description: "The Amazon Resource Name (ARN) of the repository."
returned: when state is present
type: str
sample: "arn:aws:codecommit:ap-northeast-1:268342293637:username"
clone_url_http:
description: "The URL to use for cloning the repository over HTTPS."
returned: when state is present
type: str
sample: "https://git-codecommit.ap-northeast-1.amazonaws.com/v1/repos/reponame"
clone_url_ssh:
description: "The URL to use for cloning the repository over SSH."
returned: when state is present
type: str
sample: "ssh://git-codecommit.ap-northeast-1.amazonaws.com/v1/repos/reponame"
creation_date:
description: "The date and time the repository was created, in timestamp format."
returned: when state is present
type: str
sample: "2018-10-16T13:21:41.261000+09:00"
last_modified_date:
description: "The date and time the repository was last modified, in timestamp format."
returned: when state is present
type: str
sample: "2018-10-16T13:21:41.261000+09:00"
repository_description:
description: "A comment or description about the repository."
returned: when state is present
type: str
sample: "test from ptux"
repository_id:
description: "The ID of the repository that was created or deleted"
returned: always
type: str
sample: "e62a5c54-i879-497b-b62f-9f99e4ebfk8e"
repository_name:
description: "The repository's name."
returned: when state is present
type: str
sample: "reponame"
response_metadata:
description: "Information about the response."
returned: always
type: complex
contains:
http_headers:
description: "http headers of http response"
returned: always
type: dict
http_status_code:
description: "http status code of http response"
returned: always
type: str
sample: "200"
request_id:
description: "http request id"
returned: always
type: str
sample: "fb49cfca-d0fa-11e8-85cb-b3cc4b5045ef"
retry_attempts:
description: "numbers of retry attempts"
returned: always
type: str
sample: "0"
'''
EXAMPLES = '''
# Create a new repository
- aws_codecommit:
name: repo
state: present
# Delete a repository
- aws_codecommit:
name: repo
state: absent
'''
try:
import botocore
except ImportError:
pass # Handled by AnsibleAWSModule
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
class CodeCommit(object):
def __init__(self, module=None):
self._module = module
self._client = self._module.client('codecommit')
self._check_mode = self._module.check_mode
def process(self):
result = dict(changed=False)
if self._module.params['state'] == 'present' and not self._repository_exists():
if not self._module.check_mode:
result = self._create_repository()
result['changed'] = True
if self._module.params['state'] == 'absent' and self._repository_exists():
if not self._module.check_mode:
result = self._delete_repository()
result['changed'] = True
return result
def _repository_exists(self):
try:
paginator = self._client.get_paginator('list_repositories')
for page in paginator.paginate():
repositories = page['repositories']
for item in repositories:
if self._module.params['name'] in item.values():
return True
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
self._module.fail_json_aws(e, msg="couldn't get repository")
return False
def _create_repository(self):
try:
result = self._client.create_repository(
repositoryName=self._module.params['name'],
repositoryDescription=self._module.params['comment']
)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
self._module.fail_json_aws(e, msg="couldn't create repository")
return result
def _delete_repository(self):
try:
result = self._client.delete_repository(
repositoryName=self._module.params['name']
)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
self._module.fail_json_aws(e, msg="couldn't delete repository")
return result
def main():
argument_spec = dict(
name=dict(required=True),
state=dict(choices=['present', 'absent'], required=True),
comment=dict(default='')
)
ansible_aws_module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=True
)
aws_codecommit = CodeCommit(module=ansible_aws_module)
result = aws_codecommit.process()
ansible_aws_module.exit_json(**camel_dict_to_snake_dict(result))
<|fim▁hole|>if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rusoto;
use std::{thread, time};
use rusoto::rds::{RdsClient, CreateDBInstanceMessage, DescribeDBInstancesMessage};
use rusoto::{DefaultCredentialsProvider, Region};
use rusoto::default_tls_client;
fn main() {
let database_instance_name = "rusototester2";
let credentials = DefaultCredentialsProvider::new().unwrap();
// Security groups in the default VPC will need modification to let you access this from the internet:
let rds_client = RdsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let create_db_instance_request = CreateDBInstanceMessage {
allocated_storage: Some(5),
backup_retention_period: Some(0),
db_instance_identifier: database_instance_name.to_string(),
db_instance_class: "db.t2.micro".to_string(),
// name and login details should match `.env` in rusoto-rocket
master_user_password: Some("TotallySecurePassword501".to_string()),
master_username: Some("masteruser".to_string()),
db_name: Some("rusotodb".to_string()),
engine: "postgres".to_string(),
multi_az: Some(false),
..Default::default()
};
println!("Going to make the database instance.");
let db_creation_result = rds_client.create_db_instance(&create_db_instance_request).unwrap();
println!("Created! \n\n{:?}", db_creation_result);
// The endpoint isn't available until the DB is created, let's wait for it:
let describe_instances_request = DescribeDBInstancesMessage {
db_instance_identifier: Some(database_instance_name.to_string()),
..Default::default()
};
let endpoint : rusoto::rds::Endpoint;
let ten_seconds = time::Duration::from_millis(10000);
loop {
match rds_client.describe_db_instances(&describe_instances_request).unwrap().db_instances.unwrap()[0].endpoint {
Some(ref endpoint_result) => {
endpoint = endpoint_result.clone();
break;
},
None => {
println!("Waiting for db to be available...");
thread::sleep(ten_seconds);
continue;
},
};
}
let endpoint_address = endpoint.address.unwrap();<|fim▁hole|> let endpoint_port = endpoint.port.unwrap();
println!("\n\nendpoint: {:?}", format!("{}:{}", endpoint_address, endpoint_port));
}<|fim▁end|> | |
<|file_name|>connections.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, print_function, division)
import itertools
import time
import hyperframe.frame
from hpack.hpack import Encoder, Decoder
from netlib import utils
from netlib.http import url
import netlib.http.headers
import netlib.http.response<|fim▁hole|>class TCPHandler(object):
def __init__(self, rfile, wfile=None):
self.rfile = rfile
self.wfile = wfile
class HTTP2Protocol(object):
ERROR_CODES = utils.BiDi(
NO_ERROR=0x0,
PROTOCOL_ERROR=0x1,
INTERNAL_ERROR=0x2,
FLOW_CONTROL_ERROR=0x3,
SETTINGS_TIMEOUT=0x4,
STREAM_CLOSED=0x5,
FRAME_SIZE_ERROR=0x6,
REFUSED_STREAM=0x7,
CANCEL=0x8,
COMPRESSION_ERROR=0x9,
CONNECT_ERROR=0xa,
ENHANCE_YOUR_CALM=0xb,
INADEQUATE_SECURITY=0xc,
HTTP_1_1_REQUIRED=0xd
)
CLIENT_CONNECTION_PREFACE = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
HTTP2_DEFAULT_SETTINGS = {
hyperframe.frame.SettingsFrame.HEADER_TABLE_SIZE: 4096,
hyperframe.frame.SettingsFrame.ENABLE_PUSH: 1,
hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: None,
hyperframe.frame.SettingsFrame.INITIAL_WINDOW_SIZE: 2 ** 16 - 1,
hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE: 2 ** 14,
hyperframe.frame.SettingsFrame.MAX_HEADER_LIST_SIZE: None,
}
def __init__(
self,
tcp_handler=None,
rfile=None,
wfile=None,
is_server=False,
dump_frames=False,
encoder=None,
decoder=None,
unhandled_frame_cb=None,
):
self.tcp_handler = tcp_handler or TCPHandler(rfile, wfile)
self.is_server = is_server
self.dump_frames = dump_frames
self.encoder = encoder or Encoder()
self.decoder = decoder or Decoder()
self.unhandled_frame_cb = unhandled_frame_cb
self.http2_settings = self.HTTP2_DEFAULT_SETTINGS.copy()
self.current_stream_id = None
self.connection_preface_performed = False
def read_request(
self,
__rfile,
include_body=True,
body_size_limit=None,
allow_empty=False,
):
if body_size_limit is not None:
raise NotImplementedError()
self.perform_connection_preface()
timestamp_start = time.time()
if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
self.tcp_handler.rfile.reset_timestamps()
stream_id, headers, body = self._receive_transmission(
include_body=include_body,
)
if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
# more accurate timestamp_start
timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
timestamp_end = time.time()
authority = headers.get(':authority', b'')
method = headers.get(':method', 'GET')
scheme = headers.get(':scheme', 'https')
path = headers.get(':path', '/')
headers.clear(":method")
headers.clear(":scheme")
headers.clear(":path")
host = None
port = None
if path == '*' or path.startswith("/"):
first_line_format = "relative"
elif method == 'CONNECT':
first_line_format = "authority"
if ":" in authority:
host, port = authority.split(":", 1)
else:
host = authority
else:
first_line_format = "absolute"
# FIXME: verify if path or :host contains what we need
scheme, host, port, _ = url.parse(path)
scheme = scheme.decode('ascii')
host = host.decode('ascii')
if host is None:
host = 'localhost'
if port is None:
port = 80 if scheme == 'http' else 443
port = int(port)
request = netlib.http.request.Request(
first_line_format,
method.encode('ascii'),
scheme.encode('ascii'),
host.encode('ascii'),
port,
path.encode('ascii'),
b"HTTP/2.0",
headers,
body,
timestamp_start,
timestamp_end,
)
request.stream_id = stream_id
return request
def read_response(
self,
__rfile,
request_method=b'',
body_size_limit=None,
include_body=True,
stream_id=None,
):
if body_size_limit is not None:
raise NotImplementedError()
self.perform_connection_preface()
timestamp_start = time.time()
if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
self.tcp_handler.rfile.reset_timestamps()
stream_id, headers, body = self._receive_transmission(
stream_id=stream_id,
include_body=include_body,
)
if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
# more accurate timestamp_start
timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
if include_body:
timestamp_end = time.time()
else:
timestamp_end = None
response = netlib.http.response.Response(
b"HTTP/2.0",
int(headers.get(':status', 502)),
b'',
headers,
body,
timestamp_start=timestamp_start,
timestamp_end=timestamp_end,
)
response.stream_id = stream_id
return response
def assemble(self, message):
if isinstance(message, netlib.http.request.Request):
return self.assemble_request(message)
elif isinstance(message, netlib.http.response.Response):
return self.assemble_response(message)
else:
raise ValueError("HTTP message not supported.")
def assemble_request(self, request):
assert isinstance(request, netlib.http.request.Request)
authority = self.tcp_handler.sni if self.tcp_handler.sni else self.tcp_handler.address.host
if self.tcp_handler.address.port != 443:
authority += ":%d" % self.tcp_handler.address.port
headers = request.headers.copy()
if ':authority' not in headers:
headers.insert(0, b':authority', authority.encode('ascii'))
headers.insert(0, b':scheme', request.scheme.encode('ascii'))
headers.insert(0, b':path', request.path.encode('ascii'))
headers.insert(0, b':method', request.method.encode('ascii'))
if hasattr(request, 'stream_id'):
stream_id = request.stream_id
else:
stream_id = self._next_stream_id()
return list(itertools.chain(
self._create_headers(headers, stream_id, end_stream=(request.body is None or len(request.body) == 0)),
self._create_body(request.body, stream_id)))
def assemble_response(self, response):
assert isinstance(response, netlib.http.response.Response)
headers = response.headers.copy()
if ':status' not in headers:
headers.insert(0, b':status', str(response.status_code).encode('ascii'))
if hasattr(response, 'stream_id'):
stream_id = response.stream_id
else:
stream_id = self._next_stream_id()
return list(itertools.chain(
self._create_headers(headers, stream_id, end_stream=(response.body is None or len(response.body) == 0)),
self._create_body(response.body, stream_id),
))
def perform_connection_preface(self, force=False):
if force or not self.connection_preface_performed:
if self.is_server:
self.perform_server_connection_preface(force)
else:
self.perform_client_connection_preface(force)
def perform_server_connection_preface(self, force=False):
if force or not self.connection_preface_performed:
self.connection_preface_performed = True
magic_length = len(self.CLIENT_CONNECTION_PREFACE)
magic = self.tcp_handler.rfile.safe_read(magic_length)
assert magic == self.CLIENT_CONNECTION_PREFACE
frm = hyperframe.frame.SettingsFrame(settings={
hyperframe.frame.SettingsFrame.ENABLE_PUSH: 0,
hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1,
})
self.send_frame(frm, hide=True)
self._receive_settings(hide=True)
def perform_client_connection_preface(self, force=False):
if force or not self.connection_preface_performed:
self.connection_preface_performed = True
self.tcp_handler.wfile.write(self.CLIENT_CONNECTION_PREFACE)
self.send_frame(hyperframe.frame.SettingsFrame(), hide=True)
self._receive_settings(hide=True) # server announces own settings
self._receive_settings(hide=True) # server acks my settings
def send_frame(self, frm, hide=False):
raw_bytes = frm.serialize()
self.tcp_handler.wfile.write(raw_bytes)
self.tcp_handler.wfile.flush()
if not hide and self.dump_frames: # pragma no cover
print(frm.human_readable(">>"))
def read_frame(self, hide=False):
while True:
frm = framereader.http2_read_frame(self.tcp_handler.rfile)
if not hide and self.dump_frames: # pragma no cover
print(frm.human_readable("<<"))
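            # PING frames must be answered with a PING that has the ACK flag set
            # and echoes the opaque payload (RFC 7540, section 6.7).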
if isinstance(frm, hyperframe.frame.PingFrame):
raw_bytes = hyperframe.frame.PingFrame(flags=['ACK'], payload=frm.payload).serialize()
self.tcp_handler.wfile.write(raw_bytes)
self.tcp_handler.wfile.flush()
continue
if isinstance(frm, hyperframe.frame.SettingsFrame) and 'ACK' not in frm.flags:
self._apply_settings(frm.settings, hide)
if isinstance(frm, hyperframe.frame.DataFrame) and frm.flow_controlled_length > 0:
self._update_flow_control_window(frm.stream_id, frm.flow_controlled_length)
return frm
def check_alpn(self):
alp = self.tcp_handler.get_alpn_proto_negotiated()
if alp != b'h2':
raise NotImplementedError(
"HTTP2Protocol can not handle unknown ALP: %s" % alp)
return True
def _handle_unexpected_frame(self, frm):
if isinstance(frm, hyperframe.frame.SettingsFrame):
return
if self.unhandled_frame_cb:
self.unhandled_frame_cb(frm)
def _receive_settings(self, hide=False):
while True:
frm = self.read_frame(hide)
if isinstance(frm, hyperframe.frame.SettingsFrame):
break
else:
self._handle_unexpected_frame(frm)
def _next_stream_id(self):
if self.current_stream_id is None:
if self.is_server:
# servers must use even stream ids
self.current_stream_id = 2
else:
# clients must use odd stream ids
self.current_stream_id = 1
else:
self.current_stream_id += 2
return self.current_stream_id
def _apply_settings(self, settings, hide=False):
for setting, value in settings.items():
old_value = self.http2_settings[setting]
if not old_value:
old_value = '-'
self.http2_settings[setting] = value
frm = hyperframe.frame.SettingsFrame(flags=['ACK'])
self.send_frame(frm, hide)
def _update_flow_control_window(self, stream_id, increment):
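        # Replenish both the connection-level flow-control window (stream 0)
        # and the per-stream window so the peer may continue sending DATA frames.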
frm = hyperframe.frame.WindowUpdateFrame(stream_id=0, window_increment=increment)
self.send_frame(frm)
frm = hyperframe.frame.WindowUpdateFrame(stream_id=stream_id, window_increment=increment)
self.send_frame(frm)
def _create_headers(self, headers, stream_id, end_stream=True):
def frame_cls(chunks):
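            # The first chunk of the header block goes into a HEADERS frame;
            # any remaining chunks must follow as CONTINUATION frames.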
for i in chunks:
if i == 0:
yield hyperframe.frame.HeadersFrame, i
else:
yield hyperframe.frame.ContinuationFrame, i
header_block_fragment = self.encoder.encode(headers.fields)
chunk_size = self.http2_settings[hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE]
chunks = range(0, len(header_block_fragment), chunk_size)
frms = [frm_cls(
flags=[],
stream_id=stream_id,
data=header_block_fragment[i:i + chunk_size]) for frm_cls, i in frame_cls(chunks)]
frms[-1].flags.add('END_HEADERS')
if end_stream:
frms[0].flags.add('END_STREAM')
if self.dump_frames: # pragma no cover
for frm in frms:
print(frm.human_readable(">>"))
return [frm.serialize() for frm in frms]
def _create_body(self, body, stream_id):
if body is None or len(body) == 0:
return b''
chunk_size = self.http2_settings[hyperframe.frame.SettingsFrame.MAX_FRAME_SIZE]
chunks = range(0, len(body), chunk_size)
frms = [hyperframe.frame.DataFrame(
flags=[],
stream_id=stream_id,
data=body[i:i + chunk_size]) for i in chunks]
frms[-1].flags.add('END_STREAM')
if self.dump_frames: # pragma no cover
for frm in frms:
print(frm.human_readable(">>"))
return [frm.serialize() for frm in frms]
def _receive_transmission(self, stream_id=None, include_body=True):
if not include_body:
raise NotImplementedError()
body_expected = True
header_blocks = b''
body = b''
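        # Phase 1: read HEADERS/CONTINUATION frames until END_HEADERS.
        # Phase 2: read DATA frames until END_STREAM, unless END_STREAM
        # already arrived with the headers.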
while True:
frm = self.read_frame()
if (
(isinstance(frm, hyperframe.frame.HeadersFrame) or isinstance(frm, hyperframe.frame.ContinuationFrame)) and
(stream_id is None or frm.stream_id == stream_id)
):
stream_id = frm.stream_id
header_blocks += frm.data
if 'END_STREAM' in frm.flags:
body_expected = False
if 'END_HEADERS' in frm.flags:
break
else:
self._handle_unexpected_frame(frm)
while body_expected:
frm = self.read_frame()
if isinstance(frm, hyperframe.frame.DataFrame) and frm.stream_id == stream_id:
body += frm.data
if 'END_STREAM' in frm.flags:
break
else:
self._handle_unexpected_frame(frm)
headers = netlib.http.headers.Headers(
(k.encode('ascii'), v.encode('ascii')) for k, v in self.decoder.decode(header_blocks)
)
return stream_id, headers, body<|fim▁end|> | import netlib.http.request
from netlib.http.http2 import framereader
|
<|file_name|>commonUtils.js<|end_file_name|><|fim▁begin|>var functions = {}
functions.evaluateSnapshotType = function (name) {
var splittedName = name.split('-')
var type = splittedName[splittedName.length - 1].split('.')[0]
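  // Hypothetical examples: 'cam-motion.jpg' -> 'motion',
  // 'cam-snapshot.jpg' -> 'periodic', anything else -> 'unknown'.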
return type === 'motion' ? type : type === 'snapshot' ? 'periodic' : 'unknown'
}<|fim▁hole|>functions.getSnapshotDate = function (name) {
var splittedData = name.split('.')[0].split('-')[0].split('/')
return splittedData[splittedData.length - 1]
}
module.exports = functions<|fim▁end|> | |
<|file_name|>yui-rls-debug.js<|end_file_name|><|fim▁begin|>/**
* The YUI module contains the components required for building the YUI seed
* file. This includes the script loading mechanism, a simple queue, and
* the core utilities for the library.
* @module yui
* @submodule yui-base
*/
if (typeof YUI != 'undefined') {
YUI._YUI = YUI;
}
/**
The YUI global namespace object. If YUI is already defined, the
existing YUI object will not be overwritten so that defined
namespaces are preserved. It is the constructor for the object
the end user interacts with. As indicated below, each instance
has full custom event support, but only if the event system
is available. This is a self-instantiable factory function. You
can invoke it directly like this:
YUI().use('*', function(Y) {
// ready
});
But it also works like this:
var Y = YUI();
@class YUI
@constructor
@global
@uses EventTarget
@param o* {Object} 0..n optional configuration objects. these values
are store in Y.config. See <a href="config.html">Config</a> for the list of supported
properties.
*/
/*global YUI*/
/*global YUI_config*/
var YUI = function() {
var i = 0,
Y = this,
args = arguments,
l = args.length,
instanceOf = function(o, type) {
return (o && o.hasOwnProperty && (o instanceof type));
},
gconf = (typeof YUI_config !== 'undefined') && YUI_config;
if (!(instanceOf(Y, YUI))) {
Y = new YUI();
} else {
// set up the core environment
Y._init();
/**
YUI.GlobalConfig is a master configuration that might span
multiple contexts in a non-browser environment. It is applied
first to all instances in all contexts.
@property YUI.GlobalConfig
@type {Object}
@global
@example
YUI.GlobalConfig = {
filter: 'debug'
};
YUI().use('node', function(Y) {
//debug files used here
});
YUI({
filter: 'min'
}).use('node', function(Y) {
//min files used here
});
*/
if (YUI.GlobalConfig) {
Y.applyConfig(YUI.GlobalConfig);
}
/**
YUI_config is a page-level config. It is applied to all
instances created on the page. This is applied after
YUI.GlobalConfig, and before the instance level configuration
objects.
@global
@property YUI_config
@type {Object}
@example
//Single global var to include before YUI seed file
YUI_config = {
filter: 'debug'
};
YUI().use('node', function(Y) {
//debug files used here
});
YUI({
filter: 'min'
}).use('node', function(Y) {
//min files used here
});
*/
if (gconf) {
Y.applyConfig(gconf);
}
// bind the specified additional modules for this instance
if (!l) {
Y._setup();
}
}
if (l) {
// Each instance can accept one or more configuration objects.
        // These are applied after YUI.GlobalConfig and YUI_config,
        // overriding values set in those config objects if there is a
        // matching property.
for (; i < l; i++) {
Y.applyConfig(args[i]);
}
Y._setup();
}
Y.instanceOf = instanceOf;
return Y;
};
(function() {
var proto, prop,
VERSION = '@VERSION@',
PERIOD = '.',
BASE = 'http://yui.yahooapis.com/',
DOC_LABEL = 'yui3-js-enabled',
NOOP = function() {},
SLICE = Array.prototype.slice,
APPLY_TO_AUTH = { 'io.xdrReady': 1, // the functions applyTo
'io.xdrResponse': 1, // can call. this should
'SWF.eventHandler': 1 }, // be done at build time
hasWin = (typeof window != 'undefined'),
win = (hasWin) ? window : null,
doc = (hasWin) ? win.document : null,
docEl = doc && doc.documentElement,
docClass = docEl && docEl.className,
instances = {},
time = new Date().getTime(),
add = function(el, type, fn, capture) {
if (el && el.addEventListener) {
el.addEventListener(type, fn, capture);
} else if (el && el.attachEvent) {
el.attachEvent('on' + type, fn);
}
},
remove = function(el, type, fn, capture) {
if (el && el.removeEventListener) {
// this can throw an uncaught exception in FF
try {
el.removeEventListener(type, fn, capture);
} catch (ex) {}
} else if (el && el.detachEvent) {
el.detachEvent('on' + type, fn);
}
},
handleLoad = function() {
YUI.Env.windowLoaded = true;
YUI.Env.DOMReady = true;
if (hasWin) {
remove(window, 'load', handleLoad);
}
},
getLoader = function(Y, o) {
var loader = Y.Env._loader;
if (loader) {
//loader._config(Y.config);
loader.ignoreRegistered = false;
loader.onEnd = null;
loader.data = null;
loader.required = [];
loader.loadType = null;
} else {
loader = new Y.Loader(Y.config);
Y.Env._loader = loader;
}
return loader;
},
clobber = function(r, s) {
for (var i in s) {
if (s.hasOwnProperty(i)) {
r[i] = s[i];
}
}
},
ALREADY_DONE = { success: true };
// Stamp the documentElement (HTML) with a class of "yui-loaded" to
// enable styles that need to key off of JS being enabled.
if (docEl && docClass.indexOf(DOC_LABEL) == -1) {
if (docClass) {
docClass += ' ';
}
docClass += DOC_LABEL;
docEl.className = docClass;
}
if (VERSION.indexOf('@') > -1) {
VERSION = '3.3.0'; // dev time hack for cdn test
}
proto = {
/**
* Applies a new configuration object to the YUI instance config.
* This will merge new group/module definitions, and will also
* update the loader cache if necessary. Updating Y.config directly
* will not update the cache.
* @method applyConfig
* @param {Object} o the configuration object.
* @since 3.2.0
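         * @example
         *
         *     // Hypothetical usage: register an extra module at runtime.
         *     Y.applyConfig({
         *         modules: {
         *             'my-mod': {
         *                 fullpath: '/js/my-mod.js',
         *                 requires: ['node']
         *             }
         *         }
         *     });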
*/
applyConfig: function(o) {
o = o || NOOP;
var attr,
name,
// detail,
config = this.config,
mods = config.modules,
groups = config.groups,
rls = config.rls,
loader = this.Env._loader;
for (name in o) {
if (o.hasOwnProperty(name)) {
attr = o[name];
if (mods && name == 'modules') {
clobber(mods, attr);
} else if (groups && name == 'groups') {
clobber(groups, attr);
} else if (rls && name == 'rls') {
clobber(rls, attr);
} else if (name == 'win') {
config[name] = attr.contentWindow || attr;
config.doc = config[name].document;
} else if (name == '_yuid') {
// preserve the guid
} else {
config[name] = attr;
}
}
}
if (loader) {
loader._config(o);
}
},
/**
* Old way to apply a config to the instance (calls `applyConfig` under the hood)
* @private
* @method _config
* @param {Object} o The config to apply
*/
_config: function(o) {
this.applyConfig(o);
},
/**
* Initialize this YUI instance
* @private
* @method _init
*/
_init: function() {
var filter,
Y = this,
G_ENV = YUI.Env,
Env = Y.Env,
prop;
/**
* The version number of the YUI instance.
* @property version
* @type string
*/
Y.version = VERSION;
if (!Env) {
Y.Env = {
mods: {}, // flat module map
versions: {}, // version module map
base: BASE,
cdn: BASE + VERSION + '/build/',
// bootstrapped: false,
_idx: 0,
_used: {},
_attached: {},
_missed: [],
_yidx: 0,
_uidx: 0,
_guidp: 'y',
_loaded: {},
// serviced: {},
// Regex in English:
// I'll start at the \b(simpleyui).
// 1. Look in the test string for "simpleyui" or "yui" or
// "yui-base" or "yui-rls" or "yui-foobar" that comes after a word break. That is, it
// can't match "foyui" or "i_heart_simpleyui". This can be anywhere in the string.
// 2. After #1 must come a forward slash followed by the string matched in #1, so
// "yui-base/yui-base" or "simpleyui/simpleyui" or "yui-pants/yui-pants".
// 3. The second occurence of the #1 token can optionally be followed by "-debug" or "-min",
// so "yui/yui-min", "yui/yui-debug", "yui-base/yui-base-debug". NOT "yui/yui-tshirt".
// 4. This is followed by ".js", so "yui/yui.js", "simpleyui/simpleyui-min.js"
// 0. Going back to the beginning, now. If all that stuff in 1-4 comes after a "?" in the string,
// then capture the junk between the LAST "&" and the string in 1-4. So
// "blah?foo/yui/yui.js" will capture "foo/" and "blah?some/thing.js&3.3.0/build/yui-rls/yui-rls.js"
// will capture "3.3.0/build/"
//
// Regex Exploded:
// (?:\? Find a ?
// (?:[^&]*&) followed by 0..n characters followed by an &
// * in fact, find as many sets of characters followed by a & as you can
// ([^&]*) capture the stuff after the last & in \1
// )? but it's ok if all this ?junk&more_junk stuff isn't even there
// \b(simpleyui| after a word break find either the string "simpleyui" or
// yui(?:-\w+)? the string "yui" optionally followed by a -, then more characters
// ) and store the simpleyui or yui-* string in \2
// \/\2 then comes a / followed by the simpleyui or yui-* string in \2
// (?:-(min|debug))? optionally followed by "-min" or "-debug"
// .js and ending in ".js"
_BASE_RE: /(?:\?(?:[^&]*&)*([^&]*))?\b(simpleyui|yui(?:-\w+)?)\/\2(?:-(min|debug))?\.js/,
parseBasePath: function(src, pattern) {
var match = src.match(pattern),
path, filter;
if (match) {
path = RegExp.leftContext || src.slice(0, src.indexOf(match[0]));
// this is to set up the path to the loader. The file
// filter for loader should match the yui include.
filter = match[3];
// extract correct path for mixed combo urls
// http://yuilibrary.com/projects/yui3/ticket/2528423
if (match[1]) {
path += '?' + match[1];
}
path = {
filter: filter,
path: path
                        };
}
return path;
},
getBase: G_ENV && G_ENV.getBase ||
function(pattern) {
var nodes = (doc && doc.getElementsByTagName('script')) || [],
path = Env.cdn, parsed,
i, len, src;
for (i = 0, len = nodes.length; i < len; ++i) {
src = nodes[i].src;
if (src) {
parsed = Y.Env.parseBasePath(src, pattern);
if (parsed) {
filter = parsed.filter;
path = parsed.path;
break;
}
}
}
// use CDN default
return path;
}
};
Env = Y.Env;
Env._loaded[VERSION] = {};
if (G_ENV && Y !== YUI) {
Env._yidx = ++G_ENV._yidx;
Env._guidp = ('yui_' + VERSION + '_' +
Env._yidx + '_' + time).replace(/\./g, '_');
} else if (YUI._YUI) {
G_ENV = YUI._YUI.Env;
Env._yidx += G_ENV._yidx;
Env._uidx += G_ENV._uidx;
for (prop in G_ENV) {
if (!(prop in Env)) {
Env[prop] = G_ENV[prop];
}
}
delete YUI._YUI;
}
Y.id = Y.stamp(Y);
instances[Y.id] = Y;
}
Y.constructor = YUI;
// configuration defaults
Y.config = Y.config || {
win: win,
doc: doc,
debug: true,
useBrowserConsole: true,
throwFail: true,
bootstrap: true,
cacheUse: true,
fetchCSS: true,
use_rls: true,
rls_timeout: 2000
};
if (YUI.Env.rls_disabled) {
Y.config.use_rls = false;
}
Y.config.lang = Y.config.lang || 'en-US';
Y.config.base = YUI.config.base || Y.Env.getBase(Y.Env._BASE_RE);
if (!filter || (!('mindebug').indexOf(filter))) {
filter = 'min';
}
filter = (filter) ? '-' + filter : filter;
Y.config.loaderPath = YUI.config.loaderPath || 'loader/loader' + filter + '.js';
},
/**
* Finishes the instance setup. Attaches whatever modules were defined
         * when the yui module was registered.
* @method _setup
* @private
*/
_setup: function(o) {
var i, Y = this,
core = [],
mods = YUI.Env.mods,
extras = Y.config.core || ['get','features','intl-base','rls','yui-log','yui-later'];
for (i = 0; i < extras.length; i++) {
if (mods[extras[i]]) {
core.push(extras[i]);
}
}
Y._attach(['yui-base']);
Y._attach(core);
// Y.log(Y.id + ' initialized', 'info', 'yui');
},
/**
* Executes a method on a YUI instance with
* the specified id if the specified method is whitelisted.
* @method applyTo
* @param id {String} the YUI instance id.
         * @param method {String} the name of the method to execute.
* Ex: 'Object.keys'.
* @param args {Array} the arguments to apply to the method.
* @return {Object} the return value from the applied method or null.
*/
applyTo: function(id, method, args) {
if (!(method in APPLY_TO_AUTH)) {
this.log(method + ': applyTo not allowed', 'warn', 'yui');
return null;
}
var instance = instances[id], nest, m, i;
if (instance) {
nest = method.split('.');
m = instance;
for (i = 0; i < nest.length; i = i + 1) {
m = m[nest[i]];
if (!m) {
this.log('applyTo not found: ' + method, 'warn', 'yui');
}
}
return m.apply(instance, args);
}
return null;
},
/**
Registers a module with the YUI global. The easiest way to create a
first-class YUI module is to use the YUI component build tool.
http://yuilibrary.com/projects/builder
        The build system will produce the `YUI.add` wrapper for your module, along
with any configuration info required for the module.
@method add
@param name {String} module name.
@param fn {Function} entry point into the module that is used to bind module to the YUI instance.
@param {YUI} fn.Y The YUI instance this module is executed in.
@param {String} fn.name The name of the module
@param version {String} version string.
@param details {Object} optional config data:
@param details.requires {Array} features that must be present before this module can be attached.
@param details.optional {Array} optional features that should be present if loadOptional
is defined. Note: modules are not often loaded this way in YUI 3,
but this field is still useful to inform the user that certain
features in the component will require additional dependencies.
@param details.use {Array} features that are included within this module which need to
be attached automatically when this module is attached. This
supports the YUI 3 rollup system -- a module with submodules
defined will need to have the submodules listed in the 'use'
config. The YUI component build tool does this for you.
@return {YUI} the YUI instance.
@example
YUI.add('davglass', function(Y, name) {
Y.davglass = function() {
alert('Dav was here!');
};
}, '3.4.0', { requires: ['yui-base', 'harley-davidson', 'mt-dew'] });
*/
add: function(name, fn, version, details) {
details = details || {};
var env = YUI.Env,
mod = {
name: name,
fn: fn,
version: version,
details: details
},
loader,
i, versions = env.versions;
env.mods[name] = mod;
versions[version] = versions[version] || {};
versions[version][name] = mod;
for (i in instances) {
if (instances.hasOwnProperty(i)) {
loader = instances[i].Env._loader;
if (loader) {
if (!loader.moduleInfo[name]) {
loader.addModule(details, name);
}
}
}
}
return this;
},
/**
* Executes the function associated with each required
* module, binding the module to the YUI instance.
* @method _attach
* @private
*/
_attach: function(r, moot) {
var i, name, mod, details, req, use, after,
mods = YUI.Env.mods,
aliases = YUI.Env.aliases,
Y = this, j,
done = Y.Env._attached,
len = r.length, loader;
//console.info('attaching: ' + r, 'info', 'yui');
for (i = 0; i < len; i++) {
if (!done[r[i]]) {
name = r[i];
mod = mods[name];
if (aliases && aliases[name]) {
Y._attach(aliases[name]);
continue;
}
if (!mod) {
loader = Y.Env._loader;
if (loader && loader.moduleInfo[name]) {
mod = loader.moduleInfo[name];
if (mod.use) {
moot = true;
}
}
// Y.log('no js def for: ' + name, 'info', 'yui');
//if (!loader || !loader.moduleInfo[name]) {
//if ((!loader || !loader.moduleInfo[name]) && !moot) {
if (!moot) {
if (name.indexOf('skin-') === -1) {
Y.Env._missed.push(name);
Y.message('NOT loaded: ' + name, 'warn', 'yui');
}
}
} else {
done[name] = true;
//Don't like this, but in case a mod was asked for once, then we fetch it
//We need to remove it from the missed list
for (j = 0; j < Y.Env._missed.length; j++) {
if (Y.Env._missed[j] === name) {
Y.message('Found: ' + name + ' (was reported as missing earlier)', 'warn', 'yui');
Y.Env._missed.splice(j, 1);
}
}
details = mod.details;
req = details.requires;
use = details.use;
after = details.after;
if (req) {
for (j = 0; j < req.length; j++) {
if (!done[req[j]]) {
if (!Y._attach(req)) {
return false;
}
break;
}
}
}
if (after) {
for (j = 0; j < after.length; j++) {
if (!done[after[j]]) {
if (!Y._attach(after, true)) {
return false;
}
break;
}
}
}
if (mod.fn) {
try {
mod.fn(Y, name);
} catch (e) {
Y.error('Attach error: ' + name, e, name);
return false;
}
}
if (use) {
for (j = 0; j < use.length; j++) {
if (!done[use[j]]) {
if (!Y._attach(use)) {
return false;
}
break;
}
}
}
}
}
}
return true;
},
/**
* Attaches one or more modules to the YUI instance. When this
* is executed, the requirements are analyzed, and one of
* several things can happen:
*
* * All requirements are available on the page -- The modules
* are attached to the instance. If supplied, the use callback
* is executed synchronously.
*
* * Modules are missing, the Get utility is not available OR
* the 'bootstrap' config is false -- A warning is issued about
* the missing modules and all available modules are attached.
*
* * Modules are missing, the Loader is not available but the Get
         *  utility is and bootstrap is not false -- The loader is bootstrapped
* before doing the following....
*
* * Modules are missing and the Loader is available -- The loader
* expands the dependency tree and fetches missing modules. When
         *  the loader is finished the callback supplied to use is executed
* asynchronously.
*
* @method use
* @param modules* {String} 1-n modules to bind (uses arguments array).
* @param *callback {Function} callback function executed when
* the instance has the required functionality. If included, it
* must be the last parameter.
*
* @example
* // loads and attaches dd and its dependencies
* YUI().use('dd', function(Y) {});
*
* // loads and attaches dd and node as well as all of their dependencies (since 3.4.0)
* YUI().use(['dd', 'node'], function(Y) {});
*
* // attaches all modules that are available on the page
* YUI().use('*', function(Y) {});
*
* // intrinsic YUI gallery support (since 3.1.0)
* YUI().use('gallery-yql', function(Y) {});
*
* // intrinsic YUI 2in3 support (since 3.1.0)
* YUI().use('yui2-datatable', function(Y) {});
*
* @return {YUI} the YUI instance.
*/
use: function() {
var args = SLICE.call(arguments, 0),
callback = args[args.length - 1],
Y = this,
i = 0,
name,
Env = Y.Env,
provisioned = true;
// The last argument supplied to use can be a load complete callback
if (Y.Lang.isFunction(callback)) {
args.pop();
} else {
callback = null;
}
if (Y.Lang.isArray(args[0])) {
args = args[0];
}
if (Y.config.cacheUse) {
while ((name = args[i++])) {
if (!Env._attached[name]) {
provisioned = false;
break;
}
}
if (provisioned) {
if (args.length) {
Y.log('already provisioned: ' + args, 'info', 'yui');
}
Y._notify(callback, ALREADY_DONE, args);
return Y;
}
}
if (Y._loading) {
Y._useQueue = Y._useQueue || new Y.Queue();
Y._useQueue.add([args, callback]);
} else {
Y._use(args, function(Y, response) {
Y._notify(callback, response, args);
});
}
return Y;
},
/**
* Notify handler from Loader for attachment/load errors
* @method _notify
* @param callback {Function} The callback to pass to the `Y.config.loadErrorFn`
* @param response {Object} The response returned from Loader
        * @param args {Array} The arguments passed from Loader
* @private
*/
_notify: function(callback, response, args) {
if (!response.success && this.config.loadErrorFn) {
this.config.loadErrorFn.call(this, this, callback, response, args);
} else if (callback) {
try {
callback(this, response);
} catch (e) {
this.error('use callback error', e, args);
}
}
},
/**
        * This private method is called from the `use` method queue to ensure that
        * only one set of loading logic is performed at a time.
* @method _use
* @private
* @param args* {String} 1-n modules to bind (uses arguments array).
* @param *callback {Function} callback function executed when
* the instance has the required functionality. If included, it
* must be the last parameter.
*/
_use: function(args, callback) {
if (!this.Array) {
this._attach(['yui-base']);
}
var len, loader, handleBoot, handleRLS,
Y = this,
G_ENV = YUI.Env,
mods = G_ENV.mods,
Env = Y.Env,
used = Env._used,
queue = G_ENV._loaderQueue,
firstArg = args[0],
YArray = Y.Array,
config = Y.config,
boot = config.bootstrap,
missing = [],
r = [],
ret = true,
fetchCSS = config.fetchCSS,
process = function(names, skip) {
if (!names.length) {
return;
}
YArray.each(names, function(name) {
// add this module to full list of things to attach
if (!skip) {
r.push(name);
}
// only attach a module once
if (used[name]) {
return;
}
var m = mods[name], req, use;
if (m) {
used[name] = true;
req = m.details.requires;
use = m.details.use;
} else {
// CSS files don't register themselves, see if it has
// been loaded
if (!G_ENV._loaded[VERSION][name]) {
missing.push(name);
} else {
used[name] = true; // probably css
}
}
// make sure requirements are attached
if (req && req.length) {
process(req);
}
// make sure we grab the submodule dependencies too
if (use && use.length) {
process(use, 1);
}
});
},
handleLoader = function(fromLoader) {
var response = fromLoader || {
success: true,
msg: 'not dynamic'
},
redo, origMissing,
ret = true,
data = response.data;
Y._loading = false;
if (data) {
origMissing = missing;
missing = [];
r = [];
process(data);
redo = missing.length;
if (redo) {
if (missing.sort().join() ==
origMissing.sort().join()) {
redo = false;
}
}
}
if (redo && data) {
Y._loading = false;
Y._use(args, function() {
Y.log('Nested use callback: ' + data, 'info', 'yui');
if (Y._attach(data)) {
Y._notify(callback, response, data);
}
});
} else {
if (data) {
// Y.log('attaching from loader: ' + data, 'info', 'yui');
ret = Y._attach(data);
}
if (ret) {
Y._notify(callback, response, args);
}
}
if (Y._useQueue && Y._useQueue.size() && !Y._loading) {
Y._use.apply(Y, Y._useQueue.next());
}
};
// Y.log(Y.id + ': use called: ' + a + ' :: ' + callback, 'info', 'yui');
// YUI().use('*'); // bind everything available
if (firstArg === '*') {
ret = Y._attach(Y.Object.keys(mods));
if (ret) {
handleLoader();
}
return Y;
}
// Y.log('before loader requirements: ' + args, 'info', 'yui');
// use loader to expand dependencies and sort the
// requirements if it is available.
if (boot && Y.Loader && args.length) {
loader = getLoader(Y);
loader.require(args);
loader.ignoreRegistered = true;
loader.calculate(null, (fetchCSS) ? null : 'js');
args = loader.sorted;
}
// process each requirement and any additional requirements
// the module metadata specifies
process(args);
len = missing.length;
if (len) {
missing = Y.Object.keys(YArray.hash(missing));
len = missing.length;
Y.log('Modules missing: ' + missing + ', ' + missing.length, 'info', 'yui');
}
// dynamic load
if (boot && len && Y.Loader) {
// Y.log('Using loader to fetch missing deps: ' + missing, 'info', 'yui');
Y.log('Using Loader', 'info', 'yui');
Y._loading = true;
loader = getLoader(Y);
loader.onEnd = handleLoader;
loader.context = Y;
loader.data = args;
loader.ignoreRegistered = false;
loader.require(args);
loader.insert(null, (fetchCSS) ? null : 'js');
// loader.partial(missing, (fetchCSS) ? null : 'js');
} else if (len && Y.config.use_rls && !YUI.Env.rls_enabled) {
G_ENV._rls_queue = G_ENV._rls_queue || new Y.Queue();
// server side loader service
handleRLS = function(instance, argz) {
var rls_end = function(o) {
handleLoader(o);
instance.rls_advance();
},
rls_url = instance._rls(argz);
if (rls_url) {
Y.log('Fetching RLS url', 'info', 'rls');
instance.rls_oncomplete(function(o) {
rls_end(o);
});
instance.Get.script(rls_url, {
data: argz,
timeout: instance.config.rls_timeout,
onFailure: instance.rls_handleFailure,
onTimeout: instance.rls_handleTimeout
});
} else {
rls_end({
success: true,
data: argz
});
}
};
G_ENV._rls_queue.add(function() {
Y.log('executing queued rls request', 'info', 'rls');
G_ENV._rls_in_progress = true;
Y.rls_callback = callback;
Y.rls_locals(Y, args, handleRLS);
});
if (!G_ENV._rls_in_progress && G_ENV._rls_queue.size()) {
G_ENV._rls_queue.next()();
}
} else if (boot && len && Y.Get && !Env.bootstrapped) {
Y._loading = true;
handleBoot = function() {
Y._loading = false;
queue.running = false;
Env.bootstrapped = true;
G_ENV._bootstrapping = false;
if (Y._attach(['loader'])) {
Y._use(args, callback);
}
};
if (G_ENV._bootstrapping) {
Y.log('Waiting for loader', 'info', 'yui');
queue.add(handleBoot);
} else {
G_ENV._bootstrapping = true;
Y.log('Fetching loader: ' + config.base + config.loaderPath, 'info', 'yui');
Y.Get.script(config.base + config.loaderPath, {
onEnd: handleBoot
});
}
} else {
Y.log('Attaching available dependencies: ' + args, 'info', 'yui');
ret = Y._attach(args);
if (ret) {
handleLoader();
}
}
return Y;
},
/**
Adds a namespace object onto the YUI global if called statically:
// creates YUI.your.namespace.here as nested objects
YUI.namespace("your.namespace.here");
If called as an instance method on the YUI instance, it creates the
namespace on the instance:
// creates Y.property.package
Y.namespace("property.package");
Dots in the input string cause `namespace` to create nested objects for
each token. If any part of the requested namespace already exists, the
current object will be left in place. This allows multiple calls to
`namespace` to preserve existing namespaced properties.
If the first token in the namespace string is "YAHOO", the token is
discarded.
Be careful when naming packages. Reserved words may work in some browsers
and not others. For instance, the following will fail in some browsers:
Y.namespace("really.long.nested.namespace");
        This fails because `long` is a future reserved word in ECMAScript.
@method namespace
@param {String[]} namespace* 1-n namespaces to create.
@return {Object} A reference to the last namespace object created.
**/
namespace: function() {
var a = arguments, o = this, i = 0, j, d, arg;
for (; i < a.length; i++) {
// d = ('' + a[i]).split('.');
arg = a[i];
if (arg.indexOf(PERIOD)) {
d = arg.split(PERIOD);
for (j = (d[0] == 'YAHOO') ? 1 : 0; j < d.length; j++) {
o[d[j]] = o[d[j]] || {};
o = o[d[j]];
}
} else {
o[arg] = o[arg] || {};
}
}
return o;
},
// this is replaced if the log module is included
log: NOOP,
message: NOOP,
// this is replaced if the dump module is included
dump: function (o) { return ''+o; },
/**
         * Report an error. The reporting mechanism is controlled by
* the `throwFail` configuration attribute. If throwFail is
* not specified, the message is written to the Logger, otherwise
* a JS error is thrown
* @method error
* @param msg {String} the error message.
         * @param e {Error|String} Optional JS error that was caught, or an error string.
         * If an Error object was caught and `throwFail` is specified, this error
         * will be re-thrown.
         * @param data Optional additional info.
* @return {YUI} this YUI instance.
*/
error: function(msg, e, data) {
var Y = this, ret;
if (Y.config.errorFn) {
ret = Y.config.errorFn.apply(Y, arguments);
}
if (Y.config.throwFail && !ret) {
throw (e || new Error(msg));
} else {
Y.message(msg, 'error'); // don't scrub this one
}
return Y;
},
/**
* Generate an id that is unique among all YUI instances
* @method guid
* @param pre {String} optional guid prefix.
* @return {String} the guid.
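         * @example
         *     // Illustrative only -- the exact suffix depends on the instance.
         *     var id = Y.guid('pre_'); // e.g. "pre_yui_3_3_0_1_1300000000000_42"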
*/
guid: function(pre) {
var id = this.Env._guidp + '_' + (++this.Env._uidx);
return (pre) ? (pre + id) : id;
},
/**
* Returns a `guid` associated with an object. If the object
* does not have one, a new one is created unless `readOnly`
* is specified.
* @method stamp
* @param o {Object} The object to stamp.
* @param readOnly {Boolean} if `true`, a valid guid will only
* be returned if the object has one assigned to it.
* @return {String} The object's guid or null.
*/
stamp: function(o, readOnly) {
var uid;
if (!o) {
return o;
}
// IE generates its own unique ID for dom nodes
// The uniqueID property of a document node returns a new ID
if (o.uniqueID && o.nodeType && o.nodeType !== 9) {
uid = o.uniqueID;
} else {
uid = (typeof o === 'string') ? o : o._yuid;
}
if (!uid) {
uid = this.guid();
if (!readOnly) {
try {
o._yuid = uid;
} catch (e) {
uid = null;
}
}
}
return uid;
},
/**
* Destroys the YUI instance
* @method destroy
* @since 3.3.0
*/
destroy: function() {
var Y = this;
if (Y.Event) {
Y.Event._unload();
}
delete instances[Y.id];
delete Y.Env;
delete Y.config;
}
/**
* instanceof check for objects that works around
* memory leak in IE when the item tested is
* window/document
* @method instanceOf
* @since 3.3.0
*/
};
YUI.prototype = proto;
// inheritance utilities are not available yet
for (prop in proto) {
if (proto.hasOwnProperty(prop)) {
YUI[prop] = proto[prop];
}
}
// set up the environment
YUI._init();
if (hasWin) {
// add a window load event at load time so we can capture
// the case where it fires before dynamic loading is
// complete.
add(window, 'load', handleLoad);
} else {
handleLoad();
}
YUI.Env.add = add;
YUI.Env.remove = remove;
/*global exports*/
// Support the CommonJS method for exporting our single global
if (typeof exports == 'object') {
exports.YUI = YUI;
}
}());
/**
* The config object contains all of the configuration options for
* the `YUI` instance. This object is supplied by the implementer
* when instantiating a `YUI` instance. Some properties have default
* values if they are not supplied by the implementer. This should
* not be updated directly because some values are cached. Use
* `applyConfig()` to update the config object on a YUI instance that
* has already been configured.
*
* @class config
* @static
*/
/**
* Allows the YUI seed file to fetch the loader component and library
* metadata to dynamically load additional dependencies.
*
* @property bootstrap
* @type boolean
* @default true
*/
/**
* Log to the browser console if debug is on and the browser has a
* supported console.
*
* @property useBrowserConsole
* @type boolean
* @default true
*/
/**
* A hash of log sources that should be logged. If specified, only
* log messages from these sources will be logged.
*
* @property logInclude
* @type object
*/
/**
* A hash of log sources that should be not be logged. If specified,
* all sources are logged if not on this list.
*
* @property logExclude
* @type object
*/
/**
* Set to true if the yui seed file was dynamically loaded in
* order to bootstrap components relying on the window load event
* and the `domready` custom event.
*
* @property injected
* @type boolean
* @default false
*/
/**
* If `throwFail` is set, `Y.error` will generate or re-throw a JS Error.
* Otherwise the failure is logged.
*
* @property throwFail
* @type boolean
* @default true
*/
/**
* The window/frame that this instance should operate in.
*
* @property win
* @type Window
* @default the window hosting YUI
*/
/**
* The document associated with the 'win' configuration.
*
* @property doc
* @type Document
* @default the document hosting YUI
*/
/**
* A list of modules that defines the YUI core (overrides the default).
*
* @property core
* @type string[]
*/
/**
* A list of languages in order of preference. This list is matched against
* the list of available languages in modules that the YUI instance uses to
* determine the best possible localization of language sensitive modules.
* Languages are represented using BCP 47 language tags, such as "en-GB" for
* English as used in the United Kingdom, or "zh-Hans-CN" for simplified
* Chinese as used in China. The list can be provided as a comma-separated
* list or as an array.
*
* @property lang
* @type string|string[]
*/
/**
* The default date format
* @property dateFormat
* @type string
* @deprecated use configuration in `DataType.Date.format()` instead.
*/
/**
* The default locale
* @property locale
* @type string
* @deprecated use `config.lang` instead.
*/
/**
* The default interval when polling in milliseconds.
* @property pollInterval
* @type int
* @default 20
*/
/**
* The number of dynamic nodes to insert by default before
* automatically removing them. This applies to script nodes
* because removing the node will not make the evaluated script
* unavailable. Dynamic CSS is not auto purged, because removing
* a linked style sheet will also remove the style definitions.
* @property purgethreshold
* @type int
* @default 20
*/
/**
 * The default delay in milliseconds before window resize subscribers are notified.
* @property windowResizeDelay
* @type int
* @default 40
*/
/**
* Base directory for dynamic loading
* @property base
* @type string
*/
/*
* The secure base dir (not implemented)
* For dynamic loading.
* @property secureBase
* @type string
*/
/**
* The YUI combo service base dir. Ex: `http://yui.yahooapis.com/combo?`
* For dynamic loading.
* @property comboBase
* @type string
*/
/**
* The root path to prepend to module path for the combo service.
* Ex: 3.0.0b1/build/
* For dynamic loading.
* @property root
* @type string
*/
/**
* A filter to apply to result urls. This filter will modify the default
* path for all modules. The default path for the YUI library is the
* minified version of the files (e.g., event-min.js). The filter property
* can be a predefined filter or a custom filter. The valid predefined
* filters are:
* <dl>
* <dt>DEBUG</dt>
* <dd>Selects the debug versions of the library (e.g., event-debug.js).
* This option will automatically include the Logger widget</dd>
* <dt>RAW</dt>
* <dd>Selects the non-minified version of the library (e.g., event.js).</dd>
* </dl>
* You can also define a custom filter, which must be an object literal
* containing a search expression and a replace string:
*
* myFilter: {
* 'searchExp': "-min\\.js",
* 'replaceStr': "-debug.js"
* }
*
* For dynamic loading.
*
* @property filter
* @type string|object
*/
/**
 * The `skin` config lets you configure application level skin
* customizations. It contains the following attributes which
* can be specified to override the defaults:
*
* // The default skin, which is automatically applied if not
 *      // overridden by a component-specific skin definition.
 *      // Change this to apply a different skin globally
* defaultSkin: 'sam',
*
* // This is combined with the loader base property to get
* // the default root directory for a skin.
* base: 'assets/skins/',
*
* // Any component-specific overrides can be specified here,
* // making it possible to load different skins for different
* // components. It is possible to load more than one skin
* // for a given component as well.
* overrides: {
* slider: ['capsule', 'round']
* }
*
* For dynamic loading.
*
* @property skin
*/
/**
* Hash of per-component filter specification. If specified for a given
* component, this overrides the filter config.
*
* For dynamic loading.
*
* @property filters
*/
/**
* Use the YUI combo service to reduce the number of http connections
* required to load your dependencies. Turning this off will
* disable combo handling for YUI and all module groups configured
* with a combo service.
*
* For dynamic loading.
*
* @property combine
* @type boolean
* @default true if 'base' is not supplied, false if it is.
*/
/**
* A list of modules that should never be dynamically loaded
*
* @property ignore
* @type string[]
*/
/**
* A list of modules that should always be loaded when required, even if already
* present on the page.
*
* @property force
* @type string[]
*/
/**
* Node or id for a node that should be used as the insertion point for new
* nodes. For dynamic loading.
*
* @property insertBefore
* @type string
*/
/**
* Object literal containing attributes to add to dynamically loaded script
* nodes.
* @property jsAttributes
* @type string
*/
/**
* Object literal containing attributes to add to dynamically loaded link
* nodes.
* @property cssAttributes
* @type string
*/
/**
* Number of milliseconds before a timeout occurs when dynamically
* loading nodes. If not set, there is no timeout.
* @property timeout
* @type int
*/
/**
* Callback for the 'CSSComplete' event. When dynamically loading YUI
* components with CSS, this property fires when the CSS is finished
* loading but script loading is still ongoing. This provides an
* opportunity to enhance the presentation of a loading page a little
* bit before the entire loading process is done.
*
* @property onCSS
* @type function
*/
/**
* A hash of module definitions to add to the list of YUI components.
* These components can then be dynamically loaded side by side with
* YUI via the `use()` method. This is a hash, the key is the module
 * name, and the value is an object literal specifying the metadata
* for the module. See `Loader.addModule` for the supported module
* metadata fields. Also see groups, which provides a way to
* configure the base and combo spec for a set of modules.
*
* modules: {
* mymod1: {
* requires: ['node'],
* fullpath: 'http://myserver.mydomain.com/mymod1/mymod1.js'
* },
* mymod2: {
* requires: ['mymod1'],
* fullpath: 'http://myserver.mydomain.com/mymod2/mymod2.js'
* }
* }
*
* @property modules
* @type object
*/
/**
 * A hash of module group definitions. For each group you
* can specify a list of modules and the base path and
* combo spec to use when dynamically loading the modules.
*
* groups: {
* yui2: {
* // specify whether or not this group has a combo service
* combine: true,
*
* // the base path for non-combo paths
* base: 'http://yui.yahooapis.com/2.8.0r4/build/',
*
* // the path to the combo service
* comboBase: 'http://yui.yahooapis.com/combo?',
*
* // a fragment to prepend to the path attribute when
 *          // building combo urls
* root: '2.8.0r4/build/',
*
* // the module definitions
* modules: {
* yui2_yde: {
* path: "yahoo-dom-event/yahoo-dom-event.js"
* },
* yui2_anim: {
* path: "animation/animation.js",
* requires: ['yui2_yde']
* }
* }
* }
* }
*
* @property groups
* @type object
*/
/**
* The loader 'path' attribute to the loader itself. This is combined
* with the 'base' attribute to dynamically load the loader component
 * when bootstrapping with the get utility alone.
*
* @property loaderPath
* @type string
* @default loader/loader-min.js
*/
/**
* Specifies whether or not YUI().use(...) will attempt to load CSS
* resources at all. Any truthy value will cause CSS dependencies
* to load when fetching script. The special value 'force' will
* cause CSS dependencies to be loaded even if no script is needed.
*
* @property fetchCSS
* @type boolean|string
* @default true
*/
/**
* The default gallery version to build gallery module urls
* @property gallery
* @type string
* @since 3.1.0
*/
/**
* The default YUI 2 version to build yui2 module urls. This is for
* intrinsic YUI 2 support via the 2in3 project. Also see the '2in3'
* config for pulling different revisions of the wrapped YUI 2
* modules.
* @since 3.1.0
* @property yui2
* @type string
* @default 2.8.1
*/
/**
* The 2in3 project is a deployment of the various versions of YUI 2
* deployed as first-class YUI 3 modules. Eventually, the wrapper
* for the modules will change (but the underlying YUI 2 code will
* be the same), and you can select a particular version of
* the wrapper modules via this config.
* @since 3.1.0
* @property 2in3
* @type string
* @default 1
*/
/**
* Alternative console log function for use in environments without
* a supported native console. The function is executed in the
* YUI instance context.
* @since 3.1.0
* @property logFn
* @type Function
*/
/**
* A callback to execute when Y.error is called. It receives the
 * error message and a javascript error object if Y.error was
* executed because a javascript error was caught. The function
* is executed in the YUI instance context.
*
* @since 3.2.0
* @property errorFn
* @type Function
*/
/**
* A callback to execute when the loader fails to load one or
 * more resources. This could be because of a script load
* failure. It can also fail if a javascript module fails
* to register itself, but only when the 'requireRegistration'
* is true. If this function is defined, the use() callback will
* only be called when the loader succeeds, otherwise it always
* executes unless there was a javascript error when attaching
* a module.
*
* @since 3.3.0
* @property loadErrorFn
* @type Function
*/
/**
* When set to true, the YUI loader will expect that all modules
* it is responsible for loading will be first-class YUI modules
* that register themselves with the YUI global. If this is
* set to true, loader will fail if the module registration fails
* to happen after the script is loaded.
*
* @since 3.3.0
* @property requireRegistration
* @type boolean
* @default false
*/
/**
* Cache serviced use() requests.
* @since 3.3.0
* @property cacheUse
* @type boolean
* @default true
* @deprecated no longer used
*/
/**
* The parameter defaults for the remote loader service. **Requires the rls seed file.** The properties that are supported:
*
* * `m`: comma separated list of module requirements. This
 *      must be the param name even for custom implementations.
* * `v`: the version of YUI to load. Defaults to the version
* of YUI that is being used.
* * `gv`: the version of the gallery to load (see the gallery config)
* * `env`: comma separated list of modules already on the page.
 *      this must be the param name even for custom implementations.
* * `lang`: the languages supported on the page (see the lang config)
* * `'2in3v'`: the version of the 2in3 wrapper to use (see the 2in3 config).
* * `'2v'`: the version of yui2 to use in the yui 2in3 wrappers
* * `filt`: a filter def to apply to the urls (see the filter config).
* * `filts`: a list of custom filters to apply per module
* * `tests`: this is a map of conditional module test function id keys
* with the values of 1 if the test passes, 0 if not. This must be
* the name of the querystring param in custom templates.
*
* @since 3.2.0
* @property rls
* @type {Object}
*/
/**
* The base path to the remote loader service. **Requires the rls seed file.**
*
* @since 3.2.0
* @property rls_base
* @type {String}
*/
/**
* The template to use for building the querystring portion
* of the remote loader service url. The default is determined
* by the rls config -- each property that has a value will be
* represented. **Requires the rls seed file.**
*
* @since 3.2.0
* @property rls_tmpl
* @type {String}
* @example
* m={m}&v={v}&env={env}&lang={lang}&filt={filt}&tests={tests}
*
*/
/**
* Configure the instance to use a remote loader service instead of
* the client loader. **Requires the rls seed file.**
*
* @since 3.2.0
* @property use_rls
* @type {Boolean}
*/
YUI.add('yui-base', function(Y) {
/*
* YUI stub
* @module yui
* @submodule yui-base
*/
/**
* The YUI module contains the components required for building the YUI
* seed file. This includes the script loading mechanism, a simple queue,
* and the core utilities for the library.
* @module yui
* @submodule yui-base
*/
/**
 * Provides core language utilities and extensions used throughout YUI.
*
* @class Lang
* @static
*/
var L = Y.Lang || (Y.Lang = {}),
STRING_PROTO = String.prototype,
TOSTRING = Object.prototype.toString,
TYPES = {
'undefined' : 'undefined',
'number' : 'number',
'boolean' : 'boolean',
'string' : 'string',
'[object Function]': 'function',
'[object RegExp]' : 'regexp',
'[object Array]' : 'array',
'[object Date]' : 'date',
'[object Error]' : 'error'
},
SUBREGEX = /\{\s*([^|}]+?)\s*(?:\|([^}]*))?\s*\}/g,
TRIMREGEX = /^\s+|\s+$/g,
// If either MooTools or Prototype is on the page, then there's a chance that we
// can't trust "native" language features to actually be native. When this is
// the case, we take the safe route and fall back to our own non-native
// implementation.
win = Y.config.win,
unsafeNatives = win && !!(win.MooTools || win.Prototype);
/**
* Determines whether or not the provided item is an array.
*
* Returns `false` for array-like collections such as the function `arguments`
* collection or `HTMLElement` collections. Use `Y.Array.test()` if you want to
* test for an array-like collection.
*
* @method isArray
* @param o The object to test.
* @return {boolean} true if o is an array.
* @static
*/
L.isArray = (!unsafeNatives && Array.isArray) || function (o) {
return L.type(o) === 'array';
};
/**
* Determines whether or not the provided item is a boolean.
* @method isBoolean
* @static
* @param o The object to test.
* @return {boolean} true if o is a boolean.
*/
L.isBoolean = function(o) {
return typeof o === 'boolean';
};
/**
* <p>
* Determines whether or not the provided item is a function.
* Note: Internet Explorer thinks certain functions are objects:
* </p>
*
* <pre>
* var obj = document.createElement("object");
* Y.Lang.isFunction(obj.getAttribute) // reports false in IE
*
* var input = document.createElement("input"); // append to body
* Y.Lang.isFunction(input.focus) // reports false in IE
* </pre>
*
* <p>
* You will have to implement additional tests if these functions
* matter to you.
* </p>
*
* @method isFunction
* @static
* @param o The object to test.
* @return {boolean} true if o is a function.
*/
L.isFunction = function(o) {
return L.type(o) === 'function';
};
/**
* Determines whether or not the supplied item is a date instance.
* @method isDate
* @static
* @param o The object to test.
* @return {boolean} true if o is a date.
*/
L.isDate = function(o) {
return L.type(o) === 'date' && o.toString() !== 'Invalid Date' && !isNaN(o);
};
/**
* Determines whether or not the provided item is null.
* @method isNull
* @static
* @param o The object to test.
* @return {boolean} true if o is null.
*/
L.isNull = function(o) {
return o === null;
};
/**
* Determines whether or not the provided item is a legal number.
* @method isNumber
* @static
* @param o The object to test.
* @return {boolean} true if o is a number.
*/
L.isNumber = function(o) {
return typeof o === 'number' && isFinite(o);
};
/**
* Determines whether or not the provided item is of type object
* or function. Note that arrays are also objects, so
* <code>Y.Lang.isObject([]) === true</code>.
* @method isObject
* @static
* @param o The object to test.
* @param failfn {boolean} fail if the input is a function.
* @return {boolean} true if o is an object.
* @see isPlainObject
*/
L.isObject = function(o, failfn) {
var t = typeof o;
return (o && (t === 'object' ||
(!failfn && (t === 'function' || L.isFunction(o))))) || false;
};
/**
* Determines whether or not the provided item is a string.
* @method isString
* @static
* @param o The object to test.
* @return {boolean} true if o is a string.
*/
L.isString = function(o) {
return typeof o === 'string';
};
/**
* Determines whether or not the provided item is undefined.
* @method isUndefined
* @static
* @param o The object to test.
* @return {boolean} true if o is undefined.
*/
L.isUndefined = function(o) {
return typeof o === 'undefined';
};
/**
* Returns a string without any leading or trailing whitespace. If
* the input is not a string, the input will be returned untouched.
* @method trim
* @static
* @param s {string} the string to trim.
* @return {string} the trimmed string.
*/
L.trim = STRING_PROTO.trim ? function(s) {
return s && s.trim ? s.trim() : s;
} : function (s) {
try {
return s.replace(TRIMREGEX, '');
} catch (e) {
return s;
}
};
/**
* Returns a string without any leading whitespace.
* @method trimLeft
* @static
* @param s {string} the string to trim.
* @return {string} the trimmed string.
*/
L.trimLeft = STRING_PROTO.trimLeft ? function (s) {
return s.trimLeft();
} : function (s) {
return s.replace(/^\s+/, '');
};
/**
* Returns a string without any trailing whitespace.
* @method trimRight
* @static
* @param s {string} the string to trim.
* @return {string} the trimmed string.
*/
L.trimRight = STRING_PROTO.trimRight ? function (s) {
return s.trimRight();
} : function (s) {
return s.replace(/\s+$/, '');
};
/**
* A convenience method for detecting a legitimate non-null value.
* Returns false for null/undefined/NaN, true for other values,
* including 0/false/''
* @method isValue
* @static
* @param o The item to test.
* @return {boolean} true if it is not null/undefined/NaN || false.
*/
L.isValue = function(o) {
var t = L.type(o);
switch (t) {
case 'number':
return isFinite(o);
case 'null': // fallthru
case 'undefined':
return false;
default:
return !!t;
}
};
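// Illustrative sketch (not part of the library source): unlike a plain
// truthiness check, isValue treats "empty" primitives as legitimate values.
//
// Y.Lang.isValue(0); // => true
// Y.Lang.isValue(''); // => true
// Y.Lang.isValue(false); // => true
// Y.Lang.isValue(NaN); // => false
// Y.Lang.isValue(null); // => false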
/**
* <p>
* Returns a string representing the type of the item passed in.
* </p>
*
* <p>
* Known issues:
* </p>
*
* <ul>
* <li>
* <code>typeof HTMLElementCollection</code> returns function in Safari, but
* <code>Y.type()</code> reports object, which could be a good thing --
* but it actually caused the logic in <code>Y.Lang.isObject</code> to fail.
* </li>
* </ul>
*
* @method type
* @param o the item to test.
* @return {string} the detected type.
* @static
*/
L.type = function(o) {
return TYPES[typeof o] || TYPES[TOSTRING.call(o)] || (o ? 'object' : 'null');
};
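// Illustrative sketch (not part of the library source; assumes the TYPES
// map defined earlier in this module): L.type normalizes typeof quirks
// into a small set of string tags.
//
// Y.Lang.type([]); // => 'array'
// Y.Lang.type(null); // => 'null'
// Y.Lang.type(new Date()); // => 'date'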
/**
* Lightweight version of <code>Y.substitute</code>. Uses the same template
* structure as <code>Y.substitute</code>, but doesn't support recursion,
* auto-object coercion, or formats.
* @method sub
* @param {string} s String to be modified.
* @param {object} o Object containing replacement values.
* @return {string} the substitute result.
* @static
* @since 3.2.0
*/
L.sub = function(s, o) {
return s.replace ? s.replace(SUBREGEX, function (match, key) {
return L.isUndefined(o[key]) ? match : o[key];
}) : s;
};
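// Illustrative sketch (not part of the library source):
//
// Y.Lang.sub('Loaded {count} of {total}', {count: 3, total: 10});
// // => "Loaded 3 of 10"; unmatched keys are left in place.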
/**
* Returns the current time in milliseconds.
*
* @method now
* @return {Number} Current time in milliseconds.
* @static
* @since 3.3.0
*/
L.now = Date.now || function () {
return new Date().getTime();
};
/**
* The YUI module contains the components required for building the YUI seed
* file. This includes the script loading mechanism, a simple queue, and the
* core utilities for the library.
*
* @module yui
* @submodule yui-base
*/
var Lang = Y.Lang,
Native = Array.prototype,
hasOwn = Object.prototype.hasOwnProperty;
/**
Provides utility methods for working with arrays. Additional array helpers can
be found in the `collection` and `array-extras` modules.
`Y.Array(thing)` returns a native array created from _thing_. Depending on
_thing_'s type, one of the following will happen:
* Arrays are returned unmodified unless a non-zero _startIndex_ is
specified.
* Array-like collections (see `Array.test()`) are converted to arrays.
* For everything else, a new array is created with _thing_ as the sole
item.
Note: elements that are also collections, such as `<form>` and `<select>`
elements, are not automatically converted to arrays. To force a conversion,
pass `true` as the value of the _force_ parameter.
@class Array
@constructor
@param {Any} thing The thing to arrayify.
@param {Number} [startIndex=0] If non-zero and _thing_ is an array or array-like
collection, a subset of items starting at the specified index will be
returned.
@param {Boolean} [force=false] If `true`, _thing_ will be treated as an
array-like collection no matter what.
@return {Array} A native array created from _thing_, according to the rules
described above.
**/
function YArray(thing, startIndex, force) {
var len, result;
startIndex || (startIndex = 0);
if (force || YArray.test(thing)) {
// IE throws when trying to slice HTMLElement collections.
try {
return Native.slice.call(thing, startIndex);
} catch (ex) {
result = [];
for (len = thing.length; startIndex < len; ++startIndex) {
result.push(thing[startIndex]);
}
return result;
}
}
return [thing];
}
Y.Array = YArray;
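// Illustrative sketch (not part of the library source; `demo` is a
// hypothetical function):
//
// function demo() {
//     var args = Y.Array(arguments); // arguments -> real array
//     return args.join('-');
// }
// demo(1, 2, 3); // => "1-2-3"
// Y.Array(['a', 'b', 'c'], 1); // => ['b', 'c'] (startIndex 1)
// Y.Array('foo'); // => ['foo'] (wrapped as the sole item)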
/**
Evaluates _obj_ to determine if it's an array, an array-like collection, or
something else. This is useful when working with the function `arguments`
collection and `HTMLElement` collections.
Note: This implementation doesn't consider elements that are also
collections, such as `<form>` and `<select>`, to be array-like.
@method test
@param {Object} obj Object to test.
@return {Number} A number indicating the results of the test:
* 0: Neither an array nor an array-like collection.
* 1: Real array.
* 2: Array-like collection.
@static
**/
YArray.test = function (obj) {
var result = 0;
if (Lang.isArray(obj)) {
result = 1;
} else if (Lang.isObject(obj)) {
try {
// indexed, but no tagName (element) or alert (window),
// or functions without apply/call (Safari
// HTMLElementCollection bug).
if ('length' in obj && !obj.tagName && !obj.alert && !obj.apply) {
result = 2;
}
} catch (ex) {}
}
return result;
};
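// Illustrative sketch (not part of the library source):
//
// Y.Array.test([1, 2]); // => 1 (real array)
// (function () { return Y.Array.test(arguments); }()); // => 2 (array-like)
// Y.Array.test('foo'); // => 0 (neither)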
/**
Dedupes an array of strings, returning an array that's guaranteed to contain
only one copy of a given string.
This method differs from `Array.unique()` in that it's optimized for use only
with strings, whereas `unique` may be used with other types (but is slower).
Using `dedupe()` with non-string values may result in unexpected behavior.
@method dedupe
@param {String[]} array Array of strings to dedupe.
@return {Array} Deduped copy of _array_.
@static
@since 3.4.0
**/
YArray.dedupe = function (array) {
var hash = {},
results = [],
i, item, len;
for (i = 0, len = array.length; i < len; ++i) {
item = array[i];
if (!hasOwn.call(hash, item)) {
hash[item] = 1;
results.push(item);
}
}
return results;
};
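// Illustrative sketch (not part of the library source):
//
// Y.Array.dedupe(['a', 'b', 'a', 'c', 'b']); // => ['a', 'b', 'c']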
/**
Executes the supplied function on each item in the array. This method wraps
the native ES5 `Array.forEach()` method if available.
@method each
@param {Array} array Array to iterate.
@param {Function} fn Function to execute on each item in the array. The function
will receive the following arguments:
@param {Any} fn.item Current array item.
@param {Number} fn.index Current array index.
@param {Array} fn.array Array being iterated.
@param {Object} [thisObj] `this` object to use when calling _fn_.
@return {YUI} The YUI instance.
@static
**/
YArray.each = YArray.forEach = Native.forEach ? function (array, fn, thisObj) {
Native.forEach.call(array || [], fn, thisObj || Y);
return Y;
} : function (array, fn, thisObj) {
for (var i = 0, len = (array && array.length) || 0; i < len; ++i) {
if (i in array) {
fn.call(thisObj || Y, array[i], i, array);
}
}
return Y;
};
/**
Alias for `each()`.
@method forEach
@static
**/
/**
Returns an object using the first array as keys and the second as values. If
the second array is not provided, or if it doesn't contain the same number of
values as the first array, then `true` will be used in place of the missing
values.
@example
Y.Array.hash(['a', 'b', 'c'], ['foo', 'bar']);
// => {a: 'foo', b: 'bar', c: true}
@method hash
@param {String[]} keys Array of strings to use as keys.
@param {Array} [values] Array to use as values.
@return {Object} Hash using the first array as keys and the second as values.
@static
**/
YArray.hash = function (keys, values) {
var hash = {},
vlen = (values && values.length) || 0,
i, len;
for (i = 0, len = keys.length; i < len; ++i) {
if (i in keys) {
hash[keys[i]] = vlen > i && i in values ? values[i] : true;
}
}
return hash;
};
/**
Returns the index of the first item in the array that's equal (using a strict
equality check) to the specified _value_, or `-1` if the value isn't found.
This method wraps the native ES5 `Array.indexOf()` method if available.
@method indexOf
@param {Array} array Array to search.
@param {Any} value Value to search for.
@return {Number} Index of the item strictly equal to _value_, or `-1` if not
found.
@static
**/
YArray.indexOf = Native.indexOf ? function (array, value) {
// TODO: support fromIndex
return Native.indexOf.call(array, value);
} : function (array, value) {
for (var i = 0, len = array.length; i < len; ++i) {
if (array[i] === value) {
return i;
}
}
return -1;
};
/**
Numeric sort convenience function.
The native `Array.prototype.sort()` function converts values to strings and
sorts them in lexicographic order, which is unsuitable for sorting numeric
values. Provide `Array.numericSort` as a custom sort function when you want
to sort values in numeric order.
@example
[42, 23, 8, 16, 4, 15].sort(Y.Array.numericSort);
// => [4, 8, 15, 16, 23, 42]
@method numericSort
@param {Number} a First value to compare.
@param {Number} b Second value to compare.
@return {Number} Difference between _a_ and _b_.
@static
**/
YArray.numericSort = function (a, b) {
return a - b;
};
/**
Executes the supplied function on each item in the array. Returning a truthy
value from the function will stop the processing of remaining items.
@method some
@param {Array} array Array to iterate over.
@param {Function} fn Function to execute on each item. The function will receive
the following arguments:
@param {Any} fn.value Current array item.
@param {Number} fn.index Current array index.
@param {Array} fn.array Array being iterated over.
@param {Object} [thisObj] `this` object to use when calling _fn_.
@return {Boolean} `true` if the function returns a truthy value on any of the
items in the array; `false` otherwise.
@static
**/
YArray.some = Native.some ? function (array, fn, thisObj) {
return Native.some.call(array, fn, thisObj);
} : function (array, fn, thisObj) {
for (var i = 0, len = array.length; i < len; ++i) {
if (i in array && fn.call(thisObj, array[i], i, array)) {
return true;
}
}
return false;
};
/**
* The YUI module contains the components required for building the YUI
* seed file. This includes the script loading mechanism, a simple queue,
* and the core utilities for the library.
* @module yui
* @submodule yui-base
*/
/**
* A simple FIFO queue. Items are added to the Queue with add(1..n items) and
* removed using next().
*
* @class Queue
* @constructor
* @param {MIXED} item* 0..n items to seed the queue.
*/
function Queue() {
this._init();
this.add.apply(this, arguments);
}
Queue.prototype = {
/**
* Initialize the queue
*
* @method _init
* @protected
*/
_init: function() {
/**
* The collection of enqueued items
*
* @property _q
* @type Array
* @protected
*/
this._q = [];
},
/**
* Get the next item in the queue. FIFO support
*
* @method next
* @return {MIXED} the next item in the queue.
*/
next: function() {
return this._q.shift();
},
/**
* Get the last in the queue. LIFO support.
*
* @method last
* @return {MIXED} the last item in the queue.
*/
last: function() {
return this._q.pop();
},
/**
* Add 0..n items to the end of the queue.
*
* @method add
* @param {MIXED} item* 0..n items.
* @return {object} this queue.
*/
add: function() {
this._q.push.apply(this._q, arguments);
return this;
},
/**
* Returns the current number of queued items.
*
* @method size
* @return {Number} The size.
*/
size: function() {
return this._q.length;
}
};
Y.Queue = Queue;
YUI.Env._loaderQueue = YUI.Env._loaderQueue || new Queue();
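// Illustrative sketch (not part of the library source):
//
// var q = new Y.Queue('a', 'b');
// q.add('c').size(); // => 3
// q.next(); // => 'a' (FIFO)
// q.last(); // => 'c' (LIFO)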
/**
The YUI module contains the components required for building the YUI seed file.
This includes the script loading mechanism, a simple queue, and the core
utilities for the library.
@module yui
@submodule yui-base
**/
var CACHED_DELIMITER = '__',
hasOwn = Object.prototype.hasOwnProperty,
isObject = Y.Lang.isObject;
/**
Returns a wrapper for a function which caches the return value of that function,
keyed off of the combined string representation of the argument values provided
when the wrapper is called.
Calling this function again with the same arguments will return the cached value
rather than executing the wrapped function.
Note that since the cache is keyed off of the string representation of arguments
passed to the wrapper function, arguments that aren't strings and don't provide
a meaningful `toString()` method may result in unexpected caching behavior. For
example, the objects `{}` and `{foo: 'bar'}` would both be converted to the
string `[object Object]` when used as a cache key.
@method cached
@param {Function} source The function to memoize.
@param {Object} [cache={}] Object in which to store cached values. You may seed
this object with pre-existing cached values if desired.
@param {any} [refetch] If supplied, this value is compared with the cached value
using a `==` comparison. If the values are equal, the wrapped function is
executed again even though a cached value exists.
@return {Function} Wrapped function.
@for YUI
**/
Y.cached = function (source, cache, refetch) {
cache || (cache = {});
return function (arg) {
var key = arguments.length > 1 ?
Array.prototype.join.call(arguments, CACHED_DELIMITER) :
arg.toString();
if (!(key in cache) || (refetch && cache[key] == refetch)) {
cache[key] = source.apply(source, arguments);
}
return cache[key];
};
};
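// Illustrative sketch (not part of the library source; `slowSquare` is a
// hypothetical function):
//
// var slowSquare = function (n) { return n * n; },
//     fastSquare = Y.cached(slowSquare);
// fastSquare(4); // computed, cached under the string key "4"
// fastSquare(4); // served from the cache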
/**
Returns a new object containing all of the properties of all the supplied
objects. The properties from later objects will overwrite those in earlier
objects.
Passing in a single object will create a shallow copy of it. For a deep copy,
use `clone()`.
@method merge
@param {Object} objects* One or more objects to merge.
@return {Object} A new merged object.
**/
Y.merge = function () {
var args = arguments,
i = 0,
len = args.length,
result = {};
for (; i < len; ++i) {
Y.mix(result, args[i], true);
}
return result;
};
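// Illustrative sketch (not part of the library source): later arguments
// win on key collisions.
//
// Y.merge({a: 1, b: 1}, {b: 2, c: 2}); // => {a: 1, b: 2, c: 2}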
/**
Mixes _supplier_'s properties into _receiver_. Properties will not be
overwritten or merged unless the _overwrite_ or _merge_ parameters are `true`,
respectively.
In the default mode (0), only properties the supplier owns are copied (prototype
properties are not copied). The following copying modes are available:
* `0`: _Default_. Object to object.
* `1`: Prototype to prototype.
* `2`: Prototype to prototype and object to object.
* `3`: Prototype to object.
* `4`: Object to prototype.
@method mix
@param {Function|Object} receiver The object or function to receive the mixed
properties.
@param {Function|Object} supplier The object or function supplying the
properties to be mixed.
@param {Boolean} [overwrite=false] If `true`, properties that already exist
on the receiver will be overwritten with properties from the supplier.
@param {String[]} [whitelist] An array of property names to copy. If
specified, only the whitelisted properties will be copied, and all others
will be ignored.
@param {Int} [mode=0] Mix mode to use. See above for available modes.
@param {Boolean} [merge=false] If `true`, objects and arrays that already
exist on the receiver will have the corresponding object/array from the
supplier merged into them, rather than being skipped or overwritten. When
both _overwrite_ and _merge_ are `true`, _merge_ takes precedence.
@return {Function|Object|YUI} The receiver, or the YUI instance if the
specified receiver is falsy.
**/
Y.mix = function(receiver, supplier, overwrite, whitelist, mode, merge) {
var alwaysOverwrite, exists, from, i, key, len, to;
// If no supplier is given, we return the receiver. If no receiver is given,
// we return Y. Returning Y doesn't make much sense to me, but it's
// grandfathered in for backcompat reasons.
if (!receiver || !supplier) {
return receiver || Y;
}
if (mode) {
// In mode 2 (prototype to prototype and object to object), we recurse
// once to do the proto to proto mix. The object to object mix will be
// handled later on.
if (mode === 2) {
Y.mix(receiver.prototype, supplier.prototype, overwrite,
whitelist, 0, merge);
}
// Depending on which mode is specified, we may be copying from or to
// the prototypes of the supplier and receiver.
from = mode === 1 || mode === 3 ? supplier.prototype : supplier;
to = mode === 1 || mode === 4 ? receiver.prototype : receiver;
// If either the supplier or receiver doesn't actually have a
// prototype property, then we could end up with an undefined `from`
// or `to`. If that happens, we abort and return the receiver.
if (!from || !to) {
return receiver;
}
} else {
from = supplier;
to = receiver;
}
// If `overwrite` is truthy and `merge` is falsy, then we can skip a call
// to `hasOwnProperty` on each iteration and save some time.
alwaysOverwrite = overwrite && !merge;
if (whitelist) {
for (i = 0, len = whitelist.length; i < len; ++i) {
key = whitelist[i];
// We call `Object.prototype.hasOwnProperty` instead of calling
// `hasOwnProperty` on the object itself, since the object's
// `hasOwnProperty` method may have been overridden or removed.
// Also, some native objects don't implement a `hasOwnProperty`
// method.
if (!hasOwn.call(from, key)) {
continue;
}
exists = alwaysOverwrite ? false : hasOwn.call(to, key);
if (merge && exists && isObject(to[key], true)
&& isObject(from[key], true)) {
// If we're in merge mode, and the key is present on both
// objects, and the value on both objects is either an object or
// an array (but not a function), then we recurse to merge the
// `from` value into the `to` value instead of overwriting it.
//
// Note: It's intentional that the whitelist isn't passed to the
// recursive call here. This is legacy behavior that lots of
// code still depends on.
Y.mix(to[key], from[key], overwrite, null, 0, merge);
} else if (overwrite || !exists) {
// We're not in merge mode, so we'll only copy the `from` value
// to the `to` value if we're in overwrite mode or if the
// current key doesn't exist on the `to` object.
to[key] = from[key];
}
}
} else {
for (key in from) {
// The code duplication here is for runtime performance reasons.
// Combining whitelist and non-whitelist operations into a single
// loop or breaking the shared logic out into a function both result
// in worse performance, and Y.mix is critical enough that the byte
// tradeoff is worth it.
if (!hasOwn.call(from, key)) {
continue;
}
exists = alwaysOverwrite ? false : hasOwn.call(to, key);
if (merge && exists && isObject(to[key], true)
&& isObject(from[key], true)) {
Y.mix(to[key], from[key], overwrite, null, 0, merge);
} else if (overwrite || !exists) {
to[key] = from[key];
}
}
// If this is an IE browser with the JScript enumeration bug, force
// enumeration of the buggy properties by making a recursive call with
// the buggy properties as the whitelist.
if (Y.Object._hasEnumBug) {
Y.mix(to, from, overwrite, Y.Object._forceEnum, mode, merge);
}
}
return receiver;
};
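// Illustrative sketch (not part of the library source; `Rec` and `Sup` are
// hypothetical constructors) showing mode 1, prototype to prototype:
//
// function Rec() {}
// function Sup() {}
// Sup.prototype.greet = function () { return 'hi'; };
// Y.mix(Rec, Sup, false, null, 1);
// new Rec().greet(); // => 'hi'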
/**
* The YUI module contains the components required for building the YUI
* seed file. This includes the script loading mechanism, a simple queue,
* and the core utilities for the library.
* @module yui
* @submodule yui-base
*/
/**
* Adds utilities to the YUI instance for working with objects.
*
* @class Object
*/
var hasOwn = Object.prototype.hasOwnProperty,
// If either MooTools or Prototype is on the page, then there's a chance that we
// can't trust "native" language features to actually be native. When this is
// the case, we take the safe route and fall back to our own non-native
// implementations.
win = Y.config.win,
unsafeNatives = win && !!(win.MooTools || win.Prototype),
UNDEFINED, // <-- Note the comma. We're still declaring vars.
/**
* Returns a new object that uses _obj_ as its prototype. This method wraps the
* native ES5 `Object.create()` method if available, but doesn't currently
* pass through `Object.create()`'s second argument (properties) in order to
* ensure compatibility with older browsers.
*
* @method ()
* @param {Object} obj Prototype object.
* @return {Object} New object using _obj_ as its prototype.
* @static
*/
O = Y.Object = (!unsafeNatives && Object.create) ? function (obj) {
// We currently wrap the native Object.create instead of simply aliasing it
// to ensure consistency with our fallback shim, which currently doesn't
// support Object.create()'s second argument (properties). Once we have a
// safe fallback for the properties arg, we can stop wrapping
// Object.create().
return Object.create(obj);
} : (function () {
// Reusable constructor function for the Object.create() shim.
function F() {}
// The actual shim.
return function (obj) {
F.prototype = obj;
return new F();
};
}()),
/**
* Property names that IE doesn't enumerate in for..in loops, even when they
* should be enumerable. When `_hasEnumBug` is `true`, it's necessary to
* manually enumerate these properties.
*
* @property _forceEnum
* @type String[]
* @protected
* @static
*/
forceEnum = O._forceEnum = [
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'toString',
'toLocaleString',
'valueOf'
],
/**
* `true` if this browser has the JScript enumeration bug that prevents
* enumeration of the properties named in the `_forceEnum` array, `false`
* otherwise.
*
* See:
* - <https://developer.mozilla.org/en/ECMAScript_DontEnum_attribute#JScript_DontEnum_Bug>
* - <http://whattheheadsaid.com/2010/10/a-safer-object-keys-compatibility-implementation>
*
* @property _hasEnumBug
* @type {Boolean}
* @protected
* @static
*/
hasEnumBug = O._hasEnumBug = !{valueOf: 0}.propertyIsEnumerable('valueOf'),
/**
* Returns `true` if _key_ exists on _obj_, `false` if _key_ doesn't exist or
* exists only on _obj_'s prototype. This is essentially a safer version of
* `obj.hasOwnProperty()`.
*
* @method owns
* @param {Object} obj Object to test.
* @param {String} key Property name to look for.
* @return {Boolean} `true` if _key_ exists on _obj_, `false` otherwise.
* @static
*/
owns = O.owns = function (obj, key) {
return !!obj && hasOwn.call(obj, key);
}; // <-- End of var declarations.
/**
* Alias for `owns()`.
*
* @method hasKey
* @param {Object} obj Object to test.
* @param {String} key Property name to look for.
* @return {Boolean} `true` if _key_ exists on _obj_, `false` otherwise.
* @static
*/
O.hasKey = owns;
/**
* Returns an array containing the object's enumerable keys. Does not include
* prototype keys or non-enumerable keys.
*
* Note that keys are returned in enumeration order (that is, in the same order
* that they would be enumerated by a `for-in` loop), which may not be the same
* as the order in which they were defined.
*
* This method is an alias for the native ES5 `Object.keys()` method if
* available.
*
* @example
*
* Y.Object.keys({a: 'foo', b: 'bar', c: 'baz'});
* // => ['a', 'b', 'c']
*
* @method keys
* @param {Object} obj An object.
* @return {String[]} Array of keys.
* @static
*/
O.keys = (!unsafeNatives && Object.keys) || function (obj) {
if (!Y.Lang.isObject(obj)) {
throw new TypeError('Object.keys called on a non-object');
}
var keys = [],
i, key, len;
for (key in obj) {
if (owns(obj, key)) {
keys.push(key);
}
}
if (hasEnumBug) {
for (i = 0, len = forceEnum.length; i < len; ++i) {
key = forceEnum[i];
if (owns(obj, key)) {
keys.push(key);
}
}
}
return keys;
};
/**
* Returns an array containing the values of the object's enumerable keys.
*
* Note that values are returned in enumeration order (that is, in the same
* order that they would be enumerated by a `for-in` loop), which may not be the
* same as the order in which they were defined.
*
* @example
*
* Y.Object.values({a: 'foo', b: 'bar', c: 'baz'});
* // => ['foo', 'bar', 'baz']
*
* @method values
* @param {Object} obj An object.
* @return {Array} Array of values.
* @static
*/
O.values = function (obj) {
var keys = O.keys(obj),
i = 0,
len = keys.length,
values = [];
for (; i < len; ++i) {
values.push(obj[keys[i]]);
}
return values;
};
/**
* Returns the number of enumerable keys owned by an object.
*
* @method size
* @param {Object} obj An object.
* @return {Number} The object's size.
* @static
*/
O.size = function (obj) {
return O.keys(obj).length;
};
/**
* Returns `true` if the object owns an enumerable property with the specified
* value.
*
* @method hasValue
* @param {Object} obj An object.
* @param {any} value The value to search for.
* @return {Boolean} `true` if _obj_ contains _value_, `false` otherwise.
* @static
*/
O.hasValue = function (obj, value) {
return Y.Array.indexOf(O.values(obj), value) > -1;
};
/**
* Executes a function on each enumerable property in _obj_. The function
* receives the value, the key, and the object itself as parameters (in that
* order).
*
* By default, only properties owned by _obj_ are enumerated. To include
* prototype properties, set the _proto_ parameter to `true`.
*
* @method each
* @param {Object} obj Object to enumerate.
* @param {Function} fn Function to execute on each enumerable property.
* @param {mixed} fn.value Value of the current property.
* @param {String} fn.key Key of the current property.
* @param {Object} fn.obj Object being enumerated.
* @param {Object} [thisObj] `this` object to use when calling _fn_.
* @param {Boolean} [proto=false] Include prototype properties.
* @return {YUI} the YUI instance.
* @chainable
* @static
*/
O.each = function (obj, fn, thisObj, proto) {
var key;
for (key in obj) {
if (proto || owns(obj, key)) {
fn.call(thisObj || Y, obj[key], key, obj);
}
}
return Y;
};
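// Illustrative sketch (not part of the library source):
//
// var pairs = [];
// Y.Object.each({s: 1, m: 2}, function (value, key) {
//     pairs.push(key + '=' + value);
// });
// pairs.join(','); // => "s=1,m=2" (enumeration order)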
/**
* Executes a function on each enumerable property in _obj_, but halts if the
* function returns a truthy value. The function receives the value, the key,
* and the object itself as parameters (in that order).
*
* By default, only properties owned by _obj_ are enumerated. To include
* prototype properties, set the _proto_ parameter to `true`.
*
* @method some
* @param {Object} obj Object to enumerate.
* @param {Function} fn Function to execute on each enumerable property.
* @param {mixed} fn.value Value of the current property.
* @param {String} fn.key Key of the current property.
* @param {Object} fn.obj Object being enumerated.
* @param {Object} [thisObj] `this` object to use when calling _fn_.
* @param {Boolean} [proto=false] Include prototype properties.
* @return {Boolean} `true` if any execution of _fn_ returns a truthy value,
* `false` otherwise.
* @static
*/
O.some = function (obj, fn, thisObj, proto) {
var key;
for (key in obj) {
if (proto || owns(obj, key)) {
if (fn.call(thisObj || Y, obj[key], key, obj)) {
return true;
}
}
}
return false;
};
/**
* Retrieves the sub value at the provided path,
* from the value object provided.
*
* @method getValue
* @static
* @param o The object from which to extract the property value.
* @param path {Array} A path array, specifying the object traversal path
* from which to obtain the sub value.
* @return {Any} The value stored in the path, undefined if not found,
* undefined if the source is not an object. Returns the source object
* if an empty path is provided.
*/
O.getValue = function(o, path) {
if (!Y.Lang.isObject(o)) {
return UNDEFINED;
}
var i,
p = Y.Array(path),
l = p.length;
for (i = 0; o !== UNDEFINED && i < l; i++) {
o = o[p[i]];
}
return o;
};
/**
* Sets the sub-attribute value at the provided path on the
* value object. Returns the modified value object, or
* undefined if the path is invalid.
*
* @method setValue
* @static
* @param o The object on which to set the sub value.
* @param path {Array} A path array, specifying the object traversal path
* at which to set the sub value.
* @param val {Any} The new value for the sub-attribute.
* @return {Object} The modified object, with the new sub value set, or
* undefined, if the path was invalid.
*/
O.setValue = function(o, path, val) {
var i,
p = Y.Array(path),
leafIdx = p.length - 1,
ref = o;
if (leafIdx >= 0) {
for (i = 0; ref !== UNDEFINED && i < leafIdx; i++) {
ref = ref[p[i]];
}
if (ref !== UNDEFINED) {
ref[p[i]] = val;
} else {
return UNDEFINED;
}
}
return o;
};
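// Illustrative sketch (not part of the library source) for the path-based
// accessors:
//
// var o = {a: {b: {c: 1}}};
// Y.Object.getValue(o, ['a', 'b', 'c']); // => 1
// Y.Object.setValue(o, ['a', 'b', 'c'], 2); // o.a.b.c is now 2
// Y.Object.getValue(o, ['a', 'x']); // => undefined (missing path)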
/**
* Returns `true` if the object has no enumerable properties of its own.
*
* @method isEmpty
* @param {Object} obj An object.
* @return {Boolean} `true` if the object is empty.
* @static
* @since 3.2.0
*/
O.isEmpty = function (obj) {
return !O.keys(obj).length;
};
/**
* The YUI module contains the components required for building the YUI seed
* file. This includes the script loading mechanism, a simple queue, and the
* core utilities for the library.
* @module yui
* @submodule yui-base
*/
/**
* YUI user agent detection.
* Do not fork for a browser if it can be avoided. Use feature detection when
* you can. Use the user agent as a last resort. For all fields listed
* as @type float, UA stores a version number for the browser engine,
* 0 otherwise. This value may or may not map to the version number of
* the browser using the engine. The value is presented as a float so
* that it can easily be used for boolean evaluation as well as for
* looking for a particular range of versions. Because of this,
* some of the granularity of the version info may be lost. The fields that
* are @type string default to null. The API docs list the values that
* these fields can have.
* @class UA
* @static
*/
/**
* Static method for parsing the UA string. Defaults to assigning its value to Y.UA
* @static
* @method Env.parseUA
* @param {String} subUA Parse this UA string instead of navigator.userAgent
* @returns {Object} The Y.UA object
*/
YUI.Env.parseUA = function(subUA) {
var numberify = function(s) {
var c = 0;
return parseFloat(s.replace(/\./g, function() {
return (c++ == 1) ? '' : '.';
}));
},
win = Y.config.win,
nav = win && win.navigator,
o = {
/**
* Internet Explorer version number or 0. Example: 6
* @property ie
* @type float
* @static
*/
ie: 0,
/**
* Opera version number or 0. Example: 9.2
* @property opera
* @type float
* @static
*/
opera: 0,
/**
* Gecko engine revision number. Will evaluate to 1 if Gecko
* is detected but the revision could not be found. Other browsers
* will be 0. Example: 1.8
* <pre>
* Firefox 1.0.0.4: 1.7.8 <-- Reports 1.7
* Firefox 1.5.0.9: 1.8.0.9 <-- 1.8
* Firefox 2.0.0.3: 1.8.1.3 <-- 1.81
* Firefox 3.0 <-- 1.9
* Firefox 3.5 <-- 1.91
* </pre>
* @property gecko
* @type float
* @static
*/
gecko: 0,
/**
* AppleWebKit version. KHTML browsers that are not WebKit browsers
* will evaluate to 1, other browsers 0. Example: 418.9
* <pre>
* Safari 1.3.2 (312.6): 312.8.1 <-- Reports 312.8 -- currently the
* latest available for Mac OSX 10.3.
* Safari 2.0.2: 416 <-- hasOwnProperty introduced
* Safari 2.0.4: 418 <-- preventDefault fixed
* Safari 2.0.4 (419.3): 418.9.1 <-- One version of Safari may run
* different versions of webkit
* Safari 2.0.4 (419.3): 419 <-- Tiger installations that have been
* updated, but not updated
* to the latest patch.
* Webkit 212 nightly: 522+ <-- Safari 3.0 precursor (with native
* SVG and many major issues fixed).
* Safari 3.0.4 (523.12) 523.12 <-- First Tiger release - automatic
* update from 2.x via the 10.4.11 OS patch.
* Webkit nightly 1/2008:525+ <-- Supports DOMContentLoaded event.
* yahoo.com user agent hack removed.
* </pre>
* http://en.wikipedia.org/wiki/Safari_version_history
* @property webkit
* @type float
* @static
*/
webkit: 0,
/**
* Safari will be detected as webkit, but this property will also
* be populated with the Safari version number
* @property safari
* @type float
* @static
*/
safari: 0,
/**
* Chrome will be detected as webkit, but this property will also
* be populated with the Chrome version number
* @property chrome
* @type float
* @static
*/
chrome: 0,
/**
* The mobile property will be set to a string containing any relevant
* user agent information when a modern mobile browser is detected.
* Currently limited to Safari on the iPhone/iPod Touch, Nokia N-series
* devices with the WebKit-based browser, and Opera Mini.
* @property mobile
* @type string
* @default null
* @static
*/
mobile: null,
/**
* Adobe AIR version number or 0. Only populated if webkit is detected.
* Example: 1.0
* @property air
* @type float
*/
air: 0,
/**
* Detects Apple iPad's OS version
* @property ipad
* @type float
* @static
*/
ipad: 0,
/**
* Detects Apple iPhone's OS version
* @property iphone
* @type float
* @static
*/
iphone: 0,
/**
* Detects Apple's iPod OS version
* @property ipod
* @type float
* @static
*/
ipod: 0,
/**
* General truthy check for iPad, iPhone or iPod
* @property ios
* @type float
* @default null
* @static
*/
ios: null,
/**
* Detects Google's Android OS version
* @property android
* @type float
* @static
*/
android: 0,
/**
* Detects Palm's WebOS version
* @property webos
* @type float
* @static
*/
webos: 0,
/**
* Google Caja version number or 0.
* @property caja
* @type float
*/
caja: nav && nav.cajaVersion,
/**
* Set to true if the page appears to be in SSL
* @property secure
* @type boolean
* @static
*/
secure: false,
/**
* The operating system. Currently only detecting windows, macintosh or rhino
* @property os
* @type string
* @default null
* @static
*/
os: null
},
ua = subUA || nav && nav.userAgent,
loc = win && win.location,
href = loc && loc.href,
m;
o.secure = href && (href.toLowerCase().indexOf('https') === 0);
if (ua) {
if ((/windows|win32/i).test(ua)) {
o.os = 'windows';
} else if ((/macintosh/i).test(ua)) {
o.os = 'macintosh';
} else if ((/rhino/i).test(ua)) {
o.os = 'rhino';
}
// Modern KHTML browsers should qualify as Safari X-Grade
if ((/KHTML/).test(ua)) {
o.webkit = 1;
}
// Modern WebKit browsers are at least X-Grade
m = ua.match(/AppleWebKit\/([^\s]*)/);
if (m && m[1]) {
o.webkit = numberify(m[1]);
o.safari = o.webkit;
// Mobile browser check
if (/ Mobile\//.test(ua)) {
o.mobile = 'Apple'; // iPhone or iPod Touch
m = ua.match(/OS ([^\s]*)/);
if (m && m[1]) {
m = numberify(m[1].replace('_', '.'));
}
o.ios = m;
o.ipad = o.ipod = o.iphone = 0;
m = ua.match(/iPad|iPod|iPhone/);
if (m && m[0]) {
o[m[0].toLowerCase()] = o.ios;
}
} else {
m = ua.match(/NokiaN[^\/]*|webOS\/\d\.\d/);
if (m) {
// Nokia N-series, webOS, ex: NokiaN95
o.mobile = m[0];
}
if (/webOS/.test(ua)) {
o.mobile = 'WebOS';
m = ua.match(/webOS\/([^\s]*);/);
if (m && m[1]) {
o.webos = numberify(m[1]);
}
}
if (/ Android/.test(ua)) {
if (/Mobile/.test(ua)) {
o.mobile = 'Android';
}
m = ua.match(/Android ([^\s]*);/);
if (m && m[1]) {
o.android = numberify(m[1]);
}
}
}
m = ua.match(/Chrome\/([^\s]*)/);
if (m && m[1]) {
o.chrome = numberify(m[1]); // Chrome
o.safari = 0; //Reset safari back to 0
} else {
m = ua.match(/AdobeAIR\/([^\s]*)/);
if (m) {
o.air = m[0]; // Adobe AIR 1.0 or better
}
}
}
if (!o.webkit) { // not webkit
// @todo check Opera/8.01 (J2ME/MIDP; Opera Mini/2.0.4509/1316; fi; U; ssr)
m = ua.match(/Opera[\s\/]([^\s]*)/);
if (m && m[1]) {
o.opera = numberify(m[1]);
m = ua.match(/Version\/([^\s]*)/);
if (m && m[1]) {
o.opera = numberify(m[1]); // opera 10+
}
m = ua.match(/Opera Mini[^;]*/);
if (m) {
o.mobile = m[0]; // ex: Opera Mini/2.0.4509/1316
}
} else { // not opera or webkit
m = ua.match(/MSIE\s([^;]*)/);
if (m && m[1]) {
o.ie = numberify(m[1]);
} else { // not opera, webkit, or ie
m = ua.match(/Gecko\/([^\s]*)/);
if (m) {
o.gecko = 1; // Gecko detected, look for revision
m = ua.match(/rv:([^\s\)]*)/);
if (m && m[1]) {
o.gecko = numberify(m[1]);
}
}
}
}
}
}
YUI.Env.UA = o;
return o;
};
Y.UA = YUI.Env.UA || YUI.Env.parseUA();
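// Illustrative sketch (not part of the library source): prefer feature
// detection, but when UA sniffing is unavoidable the parsed fields can be
// compared numerically.
//
// if (Y.UA.ie && Y.UA.ie < 8) { /* legacy IE path */ }
// if (Y.UA.gecko >= 1.9) { /* Gecko 1.9+ (Firefox 3+) */ }
// if (Y.UA.ios) { /* iPhone/iPad/iPod */ }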
YUI.Env.aliases = {
"anim": ["anim-base","anim-color","anim-curve","anim-easing","anim-node-plugin","anim-scroll","anim-xy"],
"app": ["controller","model","model-list","view"],
"attribute": ["attribute-base","attribute-complex"],
"autocomplete": ["autocomplete-base","autocomplete-sources","autocomplete-list","autocomplete-plugin"],
"base": ["base-base","base-pluginhost","base-build"],
"cache": ["cache-base","cache-offline","cache-plugin"],
"collection": ["array-extras","arraylist","arraylist-add","arraylist-filter","array-invoke"],
"dataschema": ["dataschema-base","dataschema-json","dataschema-xml","dataschema-array","dataschema-text"],
"datasource": ["datasource-local","datasource-io","datasource-get","datasource-function","datasource-cache","datasource-jsonschema","datasource-xmlschema","datasource-arrayschema","datasource-textschema","datasource-polling"],
"datatable": ["datatable-base","datatable-datasource","datatable-sort","datatable-scroll"],
"datatype": ["datatype-number","datatype-date","datatype-xml"],
"datatype-date": ["datatype-date-parse","datatype-date-format"],
"datatype-number": ["datatype-number-parse","datatype-number-format"],
"datatype-xml": ["datatype-xml-parse","datatype-xml-format"],
"dd": ["dd-ddm-base","dd-ddm","dd-ddm-drop","dd-drag","dd-proxy","dd-constrain","dd-drop","dd-scroll","dd-delegate"],
"dom": ["dom-base","dom-screen","dom-style","selector-native","selector"],
"editor": ["frame","selection","exec-command","editor-base","editor-para","editor-br","editor-bidi","editor-tab","createlink-base"],
"event": ["event-base","event-delegate","event-synthetic","event-mousewheel","event-mouseenter","event-key","event-focus","event-resize","event-hover","event-outside"],
"event-custom": ["event-custom-base","event-custom-complex"],
"event-gestures": ["event-flick","event-move"],
"highlight": ["highlight-base","highlight-accentfold"],
"history": ["history-base","history-hash","history-hash-ie","history-html5"],
"io": ["io-base","io-xdr","io-form","io-upload-iframe","io-queue"],
"json": ["json-parse","json-stringify"],
"loader": ["loader-base","loader-rollup","loader-yui3"],
"node": ["node-base","node-event-delegate","node-pluginhost","node-screen","node-style"],
"pluginhost": ["pluginhost-base","pluginhost-config"],
"querystring": ["querystring-parse","querystring-stringify"],
"recordset": ["recordset-base","recordset-sort","recordset-filter","recordset-indexer"],
"resize": ["resize-base","resize-proxy","resize-constrain"],
"slider": ["slider-base","slider-value-range","clickable-rail","range-slider"],
"text": ["text-accentfold","text-wordbreak"],
"widget": ["widget-base","widget-htmlparser","widget-uievents","widget-skin"]
};
}, '@VERSION@' );
YUI.add('get', function(Y) {
/**
* Provides a mechanism to fetch remote resources and
* insert them into a document.
* @module yui
* @submodule get
*/
/**
* Fetches and inserts one or more script or link nodes into the document
* @class Get
* @static
*/
var ua = Y.UA,
L = Y.Lang,
TYPE_JS = 'text/javascript',
TYPE_CSS = 'text/css',
STYLESHEET = 'stylesheet',
SCRIPT = 'script',
AUTOPURGE = 'autopurge',
UTF8 = 'utf-8',
LINK = 'link',
ASYNC = 'async',
ALL = true,
// Firefox does not support the onload event for link nodes, so
// there is no way to make the css requests synchronous. This means
// that the css rules in multiple files could be applied out of order
// in this browser if a later request returns before an earlier one.
// Safari too.
ONLOAD_SUPPORTED = {
script: ALL,
css: !(ua.webkit || ua.gecko)
},
/**
* hash of queues to manage multiple requests
* @property queues
* @private
*/
queues = {},
/**
* queue index used to generate transaction ids
* @property qidx
* @type int
* @private
*/
qidx = 0,
/**
* internal property used to prevent multiple simultaneous purge
* processes
* @property purging
* @type boolean
* @private
*/
purging,
/**
* Clear timeout state
*
* @method _clearTimeout
* @param {Object} q Queue data
* @private
*/
_clearTimeout = function(q) {
var timer = q.timer;
if (timer) {
clearTimeout(timer);
q.timer = null;
}
},
/**
* Generates an HTML element, this is not appended to a document
* @method _node
* @param {string} type the type of element.
* @param {Object} attr the fixed set of attributes for the type.
* @param {Object} custAttrs optional Any custom attributes provided by the user.
* @param {Window} win optional window to create the element in.
* @return {HTMLElement} the generated node.
* @private
*/
_node = function(type, attr, custAttrs, win) {
var w = win || Y.config.win,
d = w.document,
n = d.createElement(type),
i;
if (custAttrs) {
Y.mix(attr, custAttrs);
}
for (i in attr) {
if (attr[i] && attr.hasOwnProperty(i)) {
n.setAttribute(i, attr[i]);
}
}
return n;
},
/**
* Generates a link node
* @method _linkNode
* @param {string} url the url for the css file.
* @param {Window} win optional window to create the node in.
* @param {object} attributes optional attributes collection to apply to the
* new node.
* @return {HTMLElement} the generated node.
* @private
*/
_linkNode = function(url, win, attributes) {
return _node(LINK, {
id: Y.guid(),
type: TYPE_CSS,
rel: STYLESHEET,
href: url
}, attributes, win);
},
/**
* Generates a script node
* @method _scriptNode
* @param {string} url the url for the script file.
* @param {Window} win optional window to create the node in.
* @param {object} attributes optional attributes collection to apply to the
* new node.
* @return {HTMLElement} the generated node.
* @private
*/
_scriptNode = function(url, win, attributes) {
return _node(SCRIPT, {
id: Y.guid(),
type: TYPE_JS,
src: url
}, attributes, win);
},
/**
* Returns the data payload for callback functions.
* @method _returnData
* @param {object} q the queue.
* @param {string} msg the result message.
* @param {string} result the status message from the request.
* @return {object} the state data from the request.
* @private
*/
_returnData = function(q, msg, result) {
return {
tId: q.tId,
win: q.win,
data: q.data,
nodes: q.nodes,
msg: msg,
statusText: result,
purge: function() {
_purge(this.tId);
}
};
},
/**
* The transaction is finished
* @method _end
* @param {string} id the id of the request.
* @param {string} msg the result message.
* @param {string} result the status message from the request.
* @private
*/
_end = function(id, msg, result) {
var q = queues[id],
onEnd = q && q.onEnd;
q.finished = true;
if (onEnd) {
onEnd.call(q.context, _returnData(q, msg, result));
}
},
/**
* The request failed, execute fail handler with whatever
* was accomplished. There isn't a failure case at the
* moment unless you count aborted transactions
* @method _fail
* @param {string} id the id of the request
* @private
*/
_fail = function(id, msg) {
Y.log('get failure: ' + msg, 'warn', 'get');
var q = queues[id],
onFailure = q.onFailure;
_clearTimeout(q);
if (onFailure) {
onFailure.call(q.context, _returnData(q, msg));
}
_end(id, msg, 'failure');
},
/**
* Abort the transaction
*
* @method _abort
* @param {Object} id
* @private
*/
_abort = function(id) {
_fail(id, 'transaction ' + id + ' was aborted');
},
/**
* The request is complete, so execute the requester's callback
* @method _complete
* @param {string} id the id of the request.
* @private
*/
_complete = function(id) {
Y.log("Finishing transaction " + id, "info", "get");
var q = queues[id],
onSuccess = q.onSuccess;
_clearTimeout(q);
if (q.aborted) {
_abort(id);
} else {
if (onSuccess) {
onSuccess.call(q.context, _returnData(q));
}
// 3.3.0 had undefined msg for this path.
_end(id, undefined, 'OK');
}
},
/**
* Get node reference, from string
*
* @method _getNodeRef
* @param {String|HTMLElement} nId The node id to find. If an HTMLElement is passed in, it will be returned.
* @param {String} tId Queue id, used to determine document for queue
* @private
*/
_getNodeRef = function(nId, tId) {
var q = queues[tId],
n = (L.isString(nId)) ? q.win.document.getElementById(nId) : nId;
if (!n) {
_fail(tId, 'target node not found: ' + nId);
}
return n;
},
/**
* Removes the nodes for the specified queue
* @method _purge
* @param {string} tId the transaction id.
* @private
*/
_purge = function(tId) {
var nodes, doc, parent, sibling, node, attr, insertBefore,
i, l,
q = queues[tId];
if (q) {
nodes = q.nodes;
l = nodes.length;
// TODO: Why is node.parentNode undefined, forcing us to do this?
/*
doc = q.win.document;
parent = doc.getElementsByTagName('head')[0];
insertBefore = q.insertBefore || doc.getElementsByTagName('base')[0];
if (insertBefore) {
sibling = _getNodeRef(insertBefore, tId);
if (sibling) {
parent = sibling.parentNode;
}
}
*/
for (i = 0; i < l; i++) {
node = nodes[i];
parent = node.parentNode;
if (node.clearAttributes) {
node.clearAttributes();
} else {
// This destroys parentNode ref, so we hold onto it above first.
for (attr in node) {
if (node.hasOwnProperty(attr)) {
delete node[attr];
}
}
}
parent.removeChild(node);
}
}
q.nodes = [];
},
/**
* Progress callback
*
* @method _progress
* @param {string} id The id of the request.
* @param {string} url The url which just completed.
* @private
*/
_progress = function(id, url) {
var q = queues[id],
onProgress = q.onProgress,
o;
if (onProgress) {
o = _returnData(q);
o.url = url;
onProgress.call(q.context, o);
}
},
/**
* Timeout detected
* @method _timeout
* @param {string} id the id of the request.
* @private
*/
_timeout = function(id) {
Y.log('Timeout ' + id, 'info', 'get');
var q = queues[id],
onTimeout = q.onTimeout;
if (onTimeout) {
onTimeout.call(q.context, _returnData(q));
}
_end(id, 'timeout', 'timeout');
},
/**
* onload callback
* @method _loaded
* @param {string} id the id of the request.
* @return {string} the result.
* @private
*/
_loaded = function(id, url) {
var q = queues[id],
sync = (q && !q.async);
if (!q) {
return;
}
if (sync) {
_clearTimeout(q);
}
_progress(id, url);
// TODO: Cleaning up flow to have a consistent end point
// !q.finished check is for the async case,
// where scripts may still be loading when we've
// already aborted. Ideally there should be a single path
// for this.
if (!q.finished) {
if (q.aborted) {
_abort(id);
} else {
if ((--q.remaining) === 0) {
_complete(id);
} else if (sync) {
_next(id);
}
}
}
},
/**
* Detects when a node has been loaded. In the case of
* script nodes, this does not guarantee that the contained
* script is ready to use.
* @method _trackLoad
* @param {string} type the type of node to track.
* @param {HTMLElement} n the node to track.
* @param {string} id the id of the request.
* @param {string} url the url that is being loaded.
* @private
*/
_trackLoad = function(type, n, id, url) {
// TODO: Can we massage this to use ONLOAD_SUPPORTED[type]?
// IE supports the readystatechange event for script and css nodes
// Opera only for script nodes. Opera supports onload for script
// nodes, but this doesn't fire when there is a load failure.
// The onreadystatechange appears to be a better way to respond
// to both success and failure.
if (ua.ie) {
n.onreadystatechange = function() {
var rs = this.readyState;
if ('loaded' === rs || 'complete' === rs) {
// Y.log(id + " onreadstatechange " + url, "info", "get");
n.onreadystatechange = null;
_loaded(id, url);
}
};
} else if (ua.webkit) {
// webkit prior to 3.x is no longer supported
if (type === SCRIPT) {
// Safari 3.x supports the load event for script nodes (DOM2)
n.addEventListener('load', function() {
_loaded(id, url);
}, false);
}
} else {
// Firefox and Opera support onload (but not DOM2 in FF) handlers for
// script nodes. Opera, but not FF, supports the onload event for link nodes.
n.onload = function() {
// Y.log(id + " onload " + url, "info", "get");
_loaded(id, url);
};
n.onerror = function(e) {
_fail(id, e + ': ' + url);
};
}
},
_insertInDoc = function(node, id, win) {
// Add it to the head or insert it before 'insertBefore'.
// Work around IE bug if there is a base tag.
var q = queues[id],
doc = win.document,
insertBefore = q.insertBefore || doc.getElementsByTagName('base')[0],
sibling;
if (insertBefore) {
sibling = _getNodeRef(insertBefore, id);
if (sibling) {
Y.log('inserting before: ' + insertBefore, 'info', 'get');
sibling.parentNode.insertBefore(node, sibling);
}
} else {
// 3.3.0 assumed head is always around.
doc.getElementsByTagName('head')[0].appendChild(node);
}
},
/**
* Loads the next item for a given request
* @method _next
* @param {string} id the id of the request.
* @return {string} the result.
* @private
*/
_next = function(id) {
// Assigning out here for readability
var q = queues[id],
type = q.type,
attrs = q.attributes,
win = q.win,
timeout = q.timeout,
node,
url;
if (q.url.length > 0) {
url = q.url.shift();
Y.log('attempting to load ' + url, 'info', 'get');
// !q.timer ensures that this only happens once for async
if (timeout && !q.timer) {
q.timer = setTimeout(function() {
_timeout(id);
}, timeout);
}
if (type === SCRIPT) {
node = _scriptNode(url, win, attrs);
} else {
node = _linkNode(url, win, attrs);
}
// add the node to the queue so we can return it in the callback
q.nodes.push(node);
_trackLoad(type, node, id, url);
_insertInDoc(node, id, win);
if (!ONLOAD_SUPPORTED[type]) {
_loaded(id, url);
}
if (q.async) {
// For sync, the _next call is chained in _loaded
_next(id);
}
}
},
/**
* Removes processed queues and corresponding nodes
* @method _autoPurge
* @private
*/
_autoPurge = function() {
if (purging) {
return;
}
purging = true;
var i, q;
for (i in queues) {
if (queues.hasOwnProperty(i)) {
q = queues[i];
if (q.autopurge && q.finished) {
_purge(q.tId);
delete queues[i];
}
}
}
purging = false;
},
/**
* Saves the state for the request and begins loading
* the requested urls
* @method queue
* @param {string} type the type of node to insert.
* @param {string} url the url to load.
* @param {object} opts the hash of options for this request.
* @return {object} transaction object.
* @private
*/
_queue = function(type, url, opts) {
opts = opts || {};
var id = 'q' + (qidx++),
thresh = opts.purgethreshold || Y.Get.PURGE_THRESH,
q;
if (qidx % thresh === 0) {
_autoPurge();
}
// Merge to protect opts (grandfathered in).
q = queues[id] = Y.merge(opts);
// Avoid mix, merge overhead. Known set of props.
q.tId = id;
q.type = type;
q.url = url;
q.finished = false;
q.nodes = [];
q.win = q.win || Y.config.win;
q.context = q.context || q;
q.autopurge = (AUTOPURGE in q) ? q.autopurge : (type === SCRIPT) ? true : false;
q.attributes = q.attributes || {};
q.attributes.charset = opts.charset || q.attributes.charset || UTF8;
if (ASYNC in q && type === SCRIPT) {
q.attributes.async = q.async;
}
q.url = (L.isString(q.url)) ? [q.url] : q.url;
// TODO: Do we really need to account for this developer error?
// If the url is undefined, this is probably a trailing comma problem in IE.
if (!q.url[0]) {
q.url.shift();
Y.log('skipping empty url');
}
q.remaining = q.url.length;
_next(id);
return {
tId: id
};
};
Y.Get = {
/**
* The number of requests required before an automatic purge.
* Can be configured via the 'purgethreshold' config
* @property PURGE_THRESH
* @static
* @type int
* @default 20
* @private
*/
PURGE_THRESH: 20,
/**
* Abort a transaction
* @method abort
* @static
* @param {string|object} o Either the tId or the object returned from
* script() or css().
*/
abort : function(o) {
var id = (L.isString(o)) ? o : o.tId,
q = queues[id];
if (q) {
Y.log('Aborting ' + id, 'info', 'get');
q.aborted = true;
}
},
/**
* Fetches and inserts one or more script nodes into the head
* of the current document or the document in a specified window.
*
* @method script
* @static
* @param {string|string[]} url the url or urls to the script(s).
* @param {object} opts Options:
* <dl>
* <dt>onSuccess</dt>
* <dd>
* callback to execute when the script(s) are finished loading
* The callback receives an object back with the following
* data:
* <dl>
* <dt>win</dt>
* <dd>the window the script(s) were inserted into</dd>
* <dt>data</dt>
* <dd>the data object passed in when the request was made</dd>
* <dt>nodes</dt>
* <dd>An array containing references to the nodes that were
* inserted</dd>
* <dt>purge</dt>
* <dd>A function that, when executed, will remove the nodes
* that were inserted</dd>
* </dl>
* </dd>
* <dt>onTimeout</dt>
* <dd>
* callback to execute when a timeout occurs.
* The callback receives an object back with the following
* data:
* <dl>
* <dt>win</dt>
* <dd>the window the script(s) were inserted into</dd>
* <dt>data</dt>
* <dd>the data object passed in when the request was made</dd>
* <dt>nodes</dt>
* <dd>An array containing references to the nodes that were
* inserted</dd>
* <dt>purge</dt>
* <dd>A function that, when executed, will remove the nodes
* that were inserted</dd>
* </dl>
* </dd>
* <dt>onEnd</dt>
* <dd>a function that executes when the transaction finishes,
* regardless of the exit path</dd>
* <dt>onFailure</dt>
* <dd>
* callback to execute when the script load operation fails
* The callback receives an object back with the following
* data:
* <dl>
* <dt>win</dt>
* <dd>the window the script(s) were inserted into</dd>
* <dt>data</dt>
* <dd>the data object passed in when the request was made</dd>
* <dt>nodes</dt>
* <dd>An array containing references to the nodes that were
* inserted successfully</dd>
* <dt>purge</dt>
* <dd>A function that, when executed, will remove any nodes
* that were inserted</dd>
* </dl>
* </dd>
* <dt>onProgress</dt>
* <dd>callback to execute when each individual file is done loading
* (useful when passing in an array of js files). Receives the same
* payload as onSuccess, with the addition of a <code>url</code>
* property, which identifies the file which was loaded.</dd>
* <dt>async</dt>
* <dd>
* <p>When passing in an array of JS files, setting this flag to true
* will insert them into the document in parallel, as opposed to the
* default behavior, which is to chain load them serially. It will also
* set the async attribute on the script node to true.</p>
* <p>Setting async:true
* will lead to optimal file download performance allowing the browser to
* download multiple scripts in parallel, and execute them as soon as they
* are available.</p>
* <p>Note that async:true does not guarantee execution order of the
* scripts being downloaded. They are executed in whichever order they
* are received.</p>
* </dd>
* <dt>context</dt>
* <dd>the execution context for the callbacks</dd>
* <dt>win</dt>
* <dd>a window other than the one the utility occupies</dd>
* <dt>autopurge</dt>
* <dd>
* setting to true will let the utility's cleanup routine purge
* the script once loaded
* </dd>
* <dt>purgethreshold</dt>
* <dd>
* The number of transactions before autopurge should be initiated
* </dd>
* <dt>data</dt>
* <dd>
* data that is supplied to the callback when the script(s) are
* loaded.
* </dd>
* <dt>insertBefore</dt>
* <dd>node or node id that will become the new node's nextSibling.
* If this is not specified, nodes will be inserted before a base
* tag should it exist. Otherwise, the nodes will be appended to the
* end of the document head.</dd>
* <dt>charset</dt>
* <dd>Node charset, default utf-8 (deprecated, use the attributes
* config)</dd>
* <dt>attributes</dt>
* <dd>An object literal containing additional attributes to add to
* the script nodes</dd>
* <dt>timeout</dt>
* <dd>Number of milliseconds to wait before aborting and firing
* the timeout event</dd>
* </dl>
* <pre>
* Y.Get.script(
* ["http://yui.yahooapis.com/2.5.2/build/yahoo/yahoo-min.js",
* "http://yui.yahooapis.com/2.5.2/build/event/event-min.js"],
* {
* onSuccess: function(o) {
* this.log("won't cause error because Y is the context");
* Y.log(o.data); // foo
* Y.log(o.nodes.length === 2) // true
* // o.purge(); // optionally remove the script nodes
* // immediately
* },
* onFailure: function(o) {
* Y.log("transaction failed");
* },
* onTimeout: function(o) {
* Y.log("transaction timed out");
* },
* data: "foo",
* timeout: 10000, // 10 second timeout
* context: Y, // make the YUI instance the callback context
* // win: otherframe // target another window/frame
* autopurge: true, // allow the utility to choose when to
* // remove the nodes
* purgethreshold: 1 // purge previous transactions before
* // the next transaction
* });
* </pre>
* @return {tId: string} an object containing info about the
* transaction.
*/
script: function(url, opts) {
return _queue(SCRIPT, url, opts);
},
/**
* Fetches and inserts one or more css link nodes into the
* head of the current document or the document in a specified
* window.
* @method css
* @static
* @param {string|string[]} url the url or urls to the css file(s).
* @param {object} opts Options:
* <dl>
* <dt>onSuccess</dt>
* <dd>
* callback to execute when the css file(s) are finished loading
* The callback receives an object back with the following
* data:
* <dl>
* <dt>win</dt>
* <dd>the window the link node(s) were inserted into</dd>
* <dt>data</dt>
* <dd>the data object passed in when the request was made</dd>
* <dt>nodes</dt>
* <dd>An array containing references to the nodes that were
* inserted</dd>
* <dt>purge</dt>
* <dd>A function that, when executed, will remove the nodes
* that were inserted</dd>
* </dl>
* </dd>
* <dt>onProgress</dt>
* <dd>callback to execute when each individual file is done loading (useful when passing in an array of css files). Receives the same
* payload as onSuccess, with the addition of a <code>url</code> property, which identifies the file which was loaded. Currently only useful for non-Webkit/Gecko browsers,
* where onload for css is detected accurately.</dd>
* <dt>async</dt>
* <dd>When passing in an array of css files, setting this flag to true will insert them
* into the document in parallel, as opposed to the default behavior, which is to chain load them (where possible).
* This flag is currently more useful for scripts, since for css Get only chain loads in non-Webkit/Gecko browsers.</dd>
* <dt>context</dt>
* <dd>the execution context for the callbacks</dd>
* <dt>win</dt>
* <dd>a window other than the one the utility occupies</dd>
* <dt>data</dt>
* <dd>
* data that is supplied to the callbacks when the nodes(s) are
* loaded.
* </dd>
* <dt>insertBefore</dt>
* <dd>node or node id that will become the new node's nextSibling</dd>
* <dt>charset</dt>
* <dd>Node charset, default utf-8 (deprecated, use the attributes
* config)</dd>
* <dt>attributes</dt>
* <dd>An object literal containing additional attributes to add to
* the link tags</dd>
* </dl>
* <pre>
* Y.Get.css("http://localhost/css/menu.css");
* </pre>
* <pre>
* Y.Get.css(
* ["http://localhost/css/menu.css",
* "http://localhost/css/logger.css"], {
* insertBefore: 'custom-styles' // nodes will be inserted
* // before the specified node
* });
* </pre>
* @return {tId: string} an object containing info about the
* transaction.
*/
css: function(url, opts) {
return _queue('css', url, opts);
}
};
}, '@VERSION@' ,{requires:['yui-base']});
YUI.add('features', function(Y) {
var feature_tests = {};
Y.mix(Y.namespace('Features'), {
tests: feature_tests,
add: function(cat, name, o) {
feature_tests[cat] = feature_tests[cat] || {};
feature_tests[cat][name] = o;
},
all: function(cat, args) {
var cat_o = feature_tests[cat],
// results = {};
result = [];
if (cat_o) {
Y.Object.each(cat_o, function(v, k) {
result.push(k + ':' + (Y.Features.test(cat, k, args) ? 1 : 0));
});
}
return (result.length) ? result.join(';') : '';
},
test: function(cat, name, args) {
args = args || [];
var result, ua, test,
cat_o = feature_tests[cat],
feature = cat_o && cat_o[name];
if (!feature) {
Y.log('Feature test ' + cat + ', ' + name + ' not found');
} else {
result = feature.result;
if (Y.Lang.isUndefined(result)) {
ua = feature.ua;
if (ua) {
result = (Y.UA[ua]);
}
test = feature.test;
if (test && ((!ua) || result)) {
result = test.apply(Y, args);
}
feature.result = result;
}
}
return result;
}
});
// Y.Features.add("load", "1", {});
// Y.Features.test("load", "1");
// caps=1:1;2:0;3:1;
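// Illustrative sketch (feature name and trigger are hypothetical): register
// a capability test with Y.Features.add, then evaluate it against the
// current environment with Y.Features.test.
// Y.Features.add('load', 'touch-events', {
//     test: function(Y) { return ('ontouchstart' in Y.config.win); },
//     trigger: 'event-touch'
// });
// Y.Features.test('load', 'touch-events', [Y]); // true on touch devices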
/* This file is auto-generated by src/loader/scripts/meta_join.py */
var add = Y.Features.add;
// graphics-canvas-default
add('load', '0', {
"name": "graphics-canvas-default",
"test": function(Y) {
var DOCUMENT = Y.config.doc,
canvas = DOCUMENT && DOCUMENT.createElement("canvas");
return (DOCUMENT && !DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1") && (canvas && canvas.getContext && canvas.getContext("2d")));
},
"trigger": "graphics"
});
// autocomplete-list-keys
add('load', '1', {
"name": "autocomplete-list-keys",
"test": function (Y) {
// Only add keyboard support to autocomplete-list if this doesn't appear to
// be an iOS or Android-based mobile device.
//
// There's currently no feasible way to actually detect whether a device has
// a hardware keyboard, so this sniff will have to do. It can easily be
// overridden by manually loading the autocomplete-list-keys module.
//
// Worth noting: even though iOS supports bluetooth keyboards, Mobile Safari
// doesn't fire the keyboard events used by AutoCompleteList, so there's
// no point loading the -keys module even when a bluetooth keyboard may be
// available.
return !(Y.UA.ios || Y.UA.android);
},
"trigger": "autocomplete-list"
});
// graphics-svg
add('load', '2', {
"name": "graphics-svg",
"test": function(Y) {
var DOCUMENT = Y.config.doc;
return (DOCUMENT && DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1"));
},
"trigger": "graphics"
});
// history-hash-ie
add('load', '3', {
"name": "history-hash-ie",
"test": function (Y) {
var docMode = Y.config.doc && Y.config.doc.documentMode;
return Y.UA.ie && (!('onhashchange' in Y.config.win) ||
!docMode || docMode < 8);
},
"trigger": "history-hash"
});
// graphics-vml-default
add('load', '4', {
"name": "graphics-vml-default",
"test": function(Y) {
var DOCUMENT = Y.config.doc,
canvas = DOCUMENT && DOCUMENT.createElement("canvas");
return (DOCUMENT && !DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1") && (!canvas || !canvas.getContext || !canvas.getContext("2d")));
},
"trigger": "graphics"
});
// graphics-svg-default
add('load', '5', {
"name": "graphics-svg-default",
"test": function(Y) {
var DOCUMENT = Y.config.doc;
return (DOCUMENT && DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1"));
},
"trigger": "graphics"
});
// widget-base-ie
add('load', '6', {
"name": "widget-base-ie",
"trigger": "widget-base",
"ua": "ie"
});
// transition-timer
add('load', '7', {
"name": "transition-timer",
"test": function (Y) {
var DOCUMENT = Y.config.doc,
node = (DOCUMENT) ? DOCUMENT.documentElement: null,
ret = true;
if (node && node.style) {
ret = !('MozTransition' in node.style || 'WebkitTransition' in node.style);
}
return ret;
},
"trigger": "transition"
});
// dom-style-ie
add('load', '8', {
"name": "dom-style-ie",
"test": function (Y) {
var testFeature = Y.Features.test,
addFeature = Y.Features.add,
WINDOW = Y.config.win,
DOCUMENT = Y.config.doc,
DOCUMENT_ELEMENT = 'documentElement',
ret = false;
addFeature('style', 'computedStyle', {
test: function() {
return WINDOW && 'getComputedStyle' in WINDOW;
}
});
addFeature('style', 'opacity', {
test: function() {
return DOCUMENT && 'opacity' in DOCUMENT[DOCUMENT_ELEMENT].style;
}
});
ret = (!testFeature('style', 'opacity') &&
!testFeature('style', 'computedStyle'));
return ret;
},
"trigger": "dom-style"
});
// selector-css2
add('load', '9', {
"name": "selector-css2",
"test": function (Y) {
var DOCUMENT = Y.config.doc,
ret = DOCUMENT && !('querySelectorAll' in DOCUMENT);
return ret;
},
"trigger": "selector"
});
// event-base-ie
add('load', '10', {
"name": "event-base-ie",
"test": function(Y) {
var imp = Y.config.doc && Y.config.doc.implementation;
return (imp && (!imp.hasFeature('Events', '2.0')));
},
"trigger": "node-base"
});
// dd-gestures
add('load', '11', {
"name": "dd-gestures",
"test": function(Y) {
return (Y.config.win && ('ontouchstart' in Y.config.win && !Y.UA.chrome));
},
"trigger": "dd-drag"
});
// scrollview-base-ie
add('load', '12', {
"name": "scrollview-base-ie",
"trigger": "scrollview-base",
"ua": "ie"
});
// graphics-canvas
add('load', '13', {
"name": "graphics-canvas",
"test": function(Y) {
var DOCUMENT = Y.config.doc,
canvas = DOCUMENT && DOCUMENT.createElement("canvas");
return (DOCUMENT && !DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1") && (canvas && canvas.getContext && canvas.getContext("2d")));
},
"trigger": "graphics"
});
// graphics-vml
add('load', '14', {
"name": "graphics-vml",
"test": function(Y) {
var DOCUMENT = Y.config.doc,
canvas = DOCUMENT && DOCUMENT.createElement("canvas");
return (DOCUMENT && !DOCUMENT.implementation.hasFeature("http://www.w3.org/TR/SVG11/feature#BasicStructure", "1.1") && (!canvas || !canvas.getContext || !canvas.getContext("2d")));
},
"trigger": "graphics"
});
}, '@VERSION@' ,{requires:['yui-base']});
YUI.add('intl-base', function(Y) {
/**
* The Intl utility provides a central location for managing sets of
* localized resources (strings and formatting patterns).
*
* @class Intl
* @uses EventTarget
* @static
*/
var SPLIT_REGEX = /[, ]/;
Y.mix(Y.namespace('Intl'), {
/**
* Returns the language among those available that
* best matches the preferred language list, using the Lookup
* algorithm of BCP 47.
* If none of the available languages meets the user's preferences,
* then "" is returned.
* Extended language ranges are not supported.
*
* @method lookupBestLang
* @param {String[] | String} preferredLanguages The list of preferred
* languages in descending preference order, represented as BCP 47
* language tags. A string array or a comma-separated list.
* @param {String[]} availableLanguages The list of languages
* that the application supports, represented as BCP 47 language
* tags.
*
* @return {String} The available language that best matches the
* preferred language list, or "".
* @since 3.1.0
*/
lookupBestLang: function(preferredLanguages, availableLanguages) {
var i, language, result, index;
// check whether the list of available languages contains language;
// if so return it
function scan(language) {
var i;
for (i = 0; i < availableLanguages.length; i += 1) {
if (language.toLowerCase() ===
availableLanguages[i].toLowerCase()) {
return availableLanguages[i];
}
}
}
if (Y.Lang.isString(preferredLanguages)) {
preferredLanguages = preferredLanguages.split(SPLIT_REGEX);
}
for (i = 0; i < preferredLanguages.length; i += 1) {
language = preferredLanguages[i];
if (!language || language === '*') {
continue;
}
// check the fallback sequence for one language
while (language.length > 0) {
result = scan(language);
if (result) {
return result;
} else {
index = language.lastIndexOf('-');
if (index >= 0) {
language = language.substring(0, index);
// one-character subtags get cut along with the
// following subtag
if (index >= 2 && language.charAt(index - 2) === '-') {
language = language.substring(0, index - 2);
}
} else {
// nothing available for this language
break;
}
}
}
}
return '';
}
});
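// Usage sketch (tags are illustrative): the lookup walks each preferred
// tag's fallback chain (zh-Hans-CN -> zh-Hans -> zh) before moving to the
// next preference, and skips the '*' wildcard.
// Y.Intl.lookupBestLang('zh-Hans-CN, en-GB', ['zh-Hans', 'en']); // "zh-Hans"
// Y.Intl.lookupBestLang(['fr-FR', '*'], ['de', 'en']); // ""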
}, '@VERSION@' ,{requires:['yui-base']});
YUI.add('rls', function(Y) {
/**
* RLS (Remote Loader Service) Support
* @module yui
* @submodule rls
* @class rls
*/
Y.rls_handleTimeout = function(o) {
Y.Get.abort(o.tId);
o.purge();
o.message = 'RLS request timed out, fetching loader';
Y.rls_failure(o);
};
Y.rls_handleFailure = function(o) {
o.message = 'RLS request failed, fetching loader';
Y.rls_failure(o);
};
Y.rls_failure = function(o) {
Y.log(o.message, 'warn', 'rls');
YUI.Env.rls_disabled = true;
Y.config.use_rls = false;
if (o.data) {
o.data.unshift('loader');
Y._use(o.data, function(Y, response) {
Y._notify(Y.rls_callback, response, o.data);
//Call the RLS done method, so it can progress the queue
Y.rls_advance();
});
}
};
/**
 * Checks the environment for local modules and deals with them before firing off an RLS request.
 * All local dependencies must be calculated before the RLS request is made, so that
 * every remote dependency is evaluated with its requirements met.
* @method rls_locals
* @private
* @param {YUI} instance The YUI Instance we are working with.
* @param {Array} argz The requested modules.
* @param {Callback} cb The callback to be executed when we are done
* @param {YUI} cb.instance The instance is passed back to the callback
 * @param {Array} cb.argz The modified list of modules to require
*/
Y.rls_locals = function(instance, argz, cb) {
if (YUI.Env.rls_disabled) {
var data = {
message: 'RLS is disabled, moving to loader',
data: argz
};
Y.rls_failure(data);
return;
}
if (instance.config.modules) {
var files = [], asked = Y.Array.hash(argz),
PATH = 'fullpath', f,
mods = instance.config.modules;
for (f in mods) {
if (mods[f][PATH]) {
if (asked[f]) {
files.push(mods[f][PATH]);
if (mods[f].requires) {
Y.Array.each(mods[f].requires, function(f) {
if (!YUI.Env.mods[f]) {
if (mods[f]) {
if (mods[f][PATH]) {
files.push(mods[f][PATH]);
argz.push(f);
}
}
}
});
}
}
}
}
if (files.length) {
Y.Get.script(files, {
onEnd: function(o) {
cb(instance, argz);
},
data: argz
});
} else {
cb(instance, argz);
}
} else {
cb(instance, argz);
}
};
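// Illustrative config (path and module names hypothetical): modules declared
// with a fullpath are fetched locally via Y.Get before the RLS call is made.
// YUI({
//     modules: {
//         mymod: { fullpath: '/js/mymod.js', requires: ['node'] }
//     }
// }).use('mymod', function(Y) { /* mymod loaded locally, 'node' via RLS */ });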
/**
* Check the environment and the local config to determine if a module has already been registered.
* @method rls_needs
* @private
* @param {String} mod The module to check
* @param {YUI} instance The instance to check against.
*/
Y.rls_needs = function(mod, instance) {
var self = instance || this,
config = self.config, i,
m = YUI.Env.aliases[mod];
if (m) {
Y.log('We have an alias (' + mod + '), are all the deps available?', 'info', 'rls');
for (i = 0; i < m.length; i++) {
if (Y.rls_needs(m[i])) {
Y.log('Needs (' + mod + ')', 'info', 'rls');
return true;
}
}
Y.log('Does not need (' + mod + ')', 'info', 'rls');
return false;
}
if (!YUI.Env.mods[mod] && !(config.modules && config.modules[mod])) {
Y.log('Needs (' + mod + ')', 'info', 'rls');
return true;
}
Y.log('Does not need (' + mod + ')', 'info', 'rls');
return false;
};
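// e.g. (illustrative): before anything is attached, Y.rls_needs('dump')
// returns true; once 'dump' is in YUI.Env.mods or config.modules it returns
// false, and alias names are resolved recursively through their deps.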
/**
 * Implementation for building the remote loader service url.
* @method _rls
* @private
* @param {Array} what the requested modules.
* @since 3.2.0
* @return {string} the url for the remote loader service call, returns false if no modules are required to be fetched (they are in the ENV already).
*/
Y._rls = function(what) {
//what.push('intl');
Y.log('Issuing a new RLS Request', 'info', 'rls');
var config = Y.config,
mods = config.modules,
YArray = Y.Array,
YObject = Y.Object,
// the configuration
rls = config.rls || {
m: 1, // required in the template
v: Y.version,
gv: config.gallery,
env: 1, // required in the template
lang: config.lang,
'2in3v': config['2in3'],
'2v': config.yui2,
filt: config.filter,
filts: config.filters,
ignore: config.ignore,
tests: 1 // required in the template
},
// The rls base path
rls_base = config.rls_base || 'http://l.yimg.com/py/load?httpcache=rls-seed&gzip=1&',
// the template
rls_tmpl = config.rls_tmpl || function() {
var s = [], param;
for (param in rls) {
if (param in rls && rls[param]) {
s.push(param + '={' + param + '}');
}
}
return s.join('&');
}(),
m = [], asked = {}, o, d, mod, a, j,
w = [],
i, len = what.length,
url;
//Explode our aliases..
for (i = 0; i < len; i++) {
a = YUI.Env.aliases[what[i]];
if (a) {
for (j = 0; j < a.length; j++) {
w.push(a[j]);
}
} else {
w.push(what[i]);
}
}
what = w;
len = what.length;
for (i = 0; i < len; i++) {
asked[what[i]] = 1;
if (Y.rls_needs(what[i])) {
Y.log('Did not find ' + what[i] + ' in YUI.Env.mods or config.modules adding to RLS', 'info', 'rls');
m.push(what[i]);
} else {
Y.log(what[i] + ' was skipped from RLS', 'info', 'rls');
}
}
if (mods) {
for (i in mods) {
if (asked[i] && mods[i].requires && !mods[i].noop) {
len = mods[i].requires.length;
for (o = 0; o < len; o++) {
mod = mods[i].requires[o];
if (Y.rls_needs(mod)) {
m.push(mod);
} else {
d = YUI.Env.mods[mod] || mods[mod];
if (d) {
d = d.details || d;
if (!d.noop) {
if (d.requires) {
YArray.each(d.requires, function(o) {
if (Y.rls_needs(o)) {
m.push(o);
}
});
}
}
}
}
}
}
}
}
YObject.each(YUI.Env.mods, function(i) {
if (asked[i.name]) {
if (i.details && i.details.requires) {
if (!i.noop) {
YArray.each(i.details.requires, function(o) {
if (Y.rls_needs(o)) {
m.push(o);
}
});
}
}
}
});
function addIfNeeded(module) {
if (Y.rls_needs(module)) {
m.unshift(module);
}
}
//Add in the debug modules
if (rls.filt === 'debug') {
YArray.each(['dump', 'yui-log'], addIfNeeded);
}
//If they have a groups config, add the loader-base module
if (Y.config.groups) {
addIfNeeded('loader-base');
}
//Strip Duplicates
m = YArray.dedupe(m);
what = YArray.dedupe(what);
if (!m.length) {
//Return here if there no modules to load.
Y.log('RLS request terminated, no modules in m', 'warn', 'rls');
return false;
}
// update the request
rls.m = m.sort(); // cache proxy optimization
rls.env = [].concat(YObject.keys(YUI.Env.mods), YArray.dedupe(YUI._rls_skins)).sort();
rls.tests = Y.Features.all('load', [Y]);
url = Y.Lang.sub(rls_base + rls_tmpl, rls);
config.rls = rls;
config.rls_tmpl = rls_tmpl;
YUI._rls_active = {
asked: what,
attach: m,
inst: Y,
url: url
};
return url;
};
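// Illustrative result (module list and version hypothetical):
// Y._rls(['node', 'io-base']) returns a url shaped like
// "http://l.yimg.com/py/load?httpcache=rls-seed&gzip=1&m=io-base,node&v=3.4.0&env=...&tests=..."
// or false when every requested module is already attached.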
/**
*
* @method rls_oncomplete
* @param {Callback} cb The callback to execute when the RLS request is complete
*/
Y.rls_oncomplete = function(cb) {
YUI._rls_active.cb = cb;
};
Y.rls_advance = function() {
var G_ENV = YUI.Env;
G_ENV._rls_in_progress = false;
if (G_ENV._rls_queue.size()) {
G_ENV._rls_queue.next()();
}
};
/**
 * Calls the callback registered with Y.rls_oncomplete when the RLS request (and its dependency requests) is done.
* @method rls_done
* @param {Array} data The modules loaded
*/
Y.rls_done = function(data) {
Y.log('RLS Request complete', 'info', 'rls');
data.success = true;
YUI._rls_active.cb(data);
};
/**
* Hash to hang on to the calling RLS instance so we can deal with the return from the server.
* @property _rls_active
* @private
* @type Object
* @static
*/
if (!YUI._rls_active) {
YUI._rls_active = {};
}
/**
* An array of skins loaded via RLS to populate the ENV with when making future requests.
* @property _rls_skins
* @private
* @type Array
* @static
*/
if (!YUI._rls_skins) {
YUI._rls_skins = [];
}
/**
*
* @method $rls
* @private
* @static
* @param {Object} req The data returned from the RLS server
* @param {String} req.css Does this request need CSS? If so, load the same RLS url with &css=1 attached
* @param {Array} req.module The sorted list of modules to attach to the page.
*/
if (!YUI.$rls) {
YUI.$rls = function(req) {
var rls_active = YUI._rls_active,
Y = rls_active.inst;
if (Y) {
Y.log('RLS request received, processing', 'info', 'rls');
if (req.error) {
Y.rls_failure({
message: req.error,
data: req.modules
});
}
if (YUI.Env && YUI.Env.rls_disabled) {
Y.log('RLS processing on this instance is disabled.', 'warn', 'rls');
return;
}
if (req.css && Y.config.fetchCSS) {
Y.Get.css(rls_active.url + '&css=1');
}
if (req.modules && !req.css) {
if (req.modules.length) {
var loadInt = Y.Array.some(req.modules, function(v) {
return (v.indexOf('lang') === 0);
});
if (loadInt) {
req.modules.unshift('intl');
}
}
Y.Env.bootstrapped = true;
Y.Array.each(req.modules, function(v) {
if (v.indexOf('skin-') > -1) {
Y.log('Found skin (' + v + ') caching module for future requests', 'info', 'rls');
YUI._rls_skins.push(v);
}
});
Y._attach([].concat(req.modules, rls_active.asked));
var additional = req.missing;
if (Y.config.groups) {
if (!additional) {
additional = [];
}
additional = [].concat(additional, rls_active.asked);
}
if (additional && Y.Loader) {
Y.log('Making extra Loader request', 'info', 'rls');
var loader = new Y.Loader(rls_active.inst.config);
loader.onEnd = Y.rls_done;
loader.context = Y;
loader.data = additional;
loader.ignoreRegistered = false;
loader.require(additional);
loader.insert(null, (Y.config.fetchCSS) ? null : 'js');
} else {
Y.rls_done({ data: req.modules });
}
}
}
};
}
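// Illustrative server callback (module names hypothetical): the RLS server
// responds with script that invokes YUI.$rls; modules are attached only when
// no companion css request is pending.
// YUI.$rls({ modules: ['oop', 'event-custom-base', 'node-base'] });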
}, '@VERSION@' ,{requires:['get','features']});
YUI.add('yui-log', function(Y) {
/**
* Provides console log capability and exposes a custom event for
 * console implementations. This module is a `core` YUI module, <a href="../classes/YUI.html#method_log">its documentation is located under the YUI class</a>.
*
* @module yui
* @submodule yui-log
*/
var INSTANCE = Y,
LOGEVENT = 'yui:log',
UNDEFINED = 'undefined',
LEVELS = { debug: 1,
info: 1,
warn: 1,
error: 1 };
/**
* If the 'debug' config is true, a 'yui:log' event will be
* dispatched, which the Console widget and anything else
* can consume. If the 'useBrowserConsole' config is true, it will
* write to the browser console if available. YUI-specific log
* messages will only be present in the -debug versions of the
* JS files. The build system is supposed to remove log statements
* from the raw and minified versions of the files.
*
* @method log
* @for YUI
* @param {String} msg The message to log.
* @param {String} cat The log category for the message. Default
* categories are "info", "warn", "error", time".
* Custom categories can be used as well. (opt).
* @param {String} src The source of the the message (opt).
* @param {boolean} silent If true, the log event won't fire.
* @return {YUI} YUI instance.
*/
INSTANCE.log = function(msg, cat, src, silent) {
var bail, excl, incl, m, f,
Y = INSTANCE,
c = Y.config,
publisher = (Y.fire) ? Y : YUI.Env.globalEvents;
// suppress log message if the config is off or the event stack
// or the event call stack contains a consumer of the yui:log event
if (c.debug) {
// apply source filters
if (src) {
excl = c.logExclude;
incl = c.logInclude;
if (incl && !(src in incl)) {
bail = 1;
} else if (incl && (src in incl)) {
bail = !incl[src];
} else if (excl && (src in excl)) {
bail = excl[src];
}
}
if (!bail) {
if (c.useBrowserConsole) {
m = (src) ? src + ': ' + msg : msg;
if (Y.Lang.isFunction(c.logFn)) {
c.logFn.call(Y, msg, cat, src);
} else if (typeof console != UNDEFINED && console.log) {
f = (cat && console[cat] && (cat in LEVELS)) ? cat : 'log';
console[f](m);
} else if (typeof opera != UNDEFINED) {
opera.postError(m);
}
}
if (publisher && !silent) {
if (publisher == Y && (!publisher.getEvent(LOGEVENT))) {
publisher.publish(LOGEVENT, {
broadcast: 2
});
}
publisher.fire(LOGEVENT, {
msg: msg,
cat: cat,
src: src
});
}
}
}
return Y;
};
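// Illustrative usage (config values hypothetical): enable debug logging and
// filter to a single source before writing a categorized message.
// YUI({ debug: true, logInclude: { 'my-module': true } }).use('node', function(Y) {
//     Y.log('cache miss', 'warn', 'my-module'); // also fired as a yui:log event
// });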
/**
* Write a system message. This message will be preserved in the
* minified and raw versions of the YUI files, unlike log statements.
* @method message
* @for YUI
* @param {String} msg The message to log.
* @param {String} cat The log category for the message. Default
* categories are "info", "warn", "error", time".
* Custom categories can be used as well. (opt).
* @param {String} src The source of the the message (opt).
* @param {boolean} silent If true, the log event won't fire.
* @return {YUI} YUI instance.
*/
INSTANCE.message = function() {
return INSTANCE.log.apply(INSTANCE, arguments);
};
}, '@VERSION@' ,{requires:['yui-base']});
YUI.add('yui-later', function(Y) {
/**
 * Provides a setTimeout/setInterval wrapper. This module is a `core` YUI module, <a href="../classes/YUI.html#method_later">its documentation is located under the YUI class</a>.
*
* @module yui
* @submodule yui-later
*/
var NO_ARGS = [];
/**
* Executes the supplied function in the context of the supplied
* object 'when' milliseconds later. Executes the function a
* single time unless periodic is set to true.
* @for YUI
* @method later
* @param when {int} the number of milliseconds to wait until the fn
* is executed.
* @param o the context object.
* @param fn {Function|String} the function to execute or the name of
* the method in the 'o' object to execute.
* @param data [Array] data that is provided to the function. This
* accepts either a single item or an array. If an array is provided,
* the function is executed with one parameter for each array item.
* If you need to pass a single array parameter, it needs to be wrapped
* in an array [myarray].
*
* Note: native methods in IE may not have the call and apply methods.
* In this case, it will work, but you are limited to four arguments.
*
* @param periodic {boolean} if true, executes continuously at supplied
* interval until canceled.
* @return {object} a timer object. Call the cancel() method on this
* object to stop the timer.
*/
Y.later = function(when, o, fn, data, periodic) {
when = when || 0;
data = (!Y.Lang.isUndefined(data)) ? Y.Array(data) : data;
var cancelled = false,
method = (o && Y.Lang.isString(fn)) ? o[fn] : fn,
wrapper = function() {
// IE 8- may execute a setInterval callback one last time
// after clearInterval was called, so in order to preserve
// the cancel() === no more runny-run, we have to jump through
// an extra hoop.
if (!cancelled) {
if (!method.apply) {
method(data[0], data[1], data[2], data[3]);
} else {
method.apply(o, data || NO_ARGS);
}
}
},
id = (periodic) ? setInterval(wrapper, when) : setTimeout(wrapper, when);
return {
id: id,
interval: periodic,
cancel: function() {
cancelled = true;
if (this.interval) {
clearInterval(id);
} else {
clearTimeout(id);
}
}
};
};
Y.Lang.later = Y.later;
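// Usage sketch (interval and handler are illustrative): run the handler
// every 500ms until cancelled; data is re-passed on each tick.
// var timer = Y.later(500, null, function(msg) { Y.log(msg); }, 'tick', true);
// timer.cancel(); // stops the interval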
}, '@VERSION@' ,{requires:['yui-base']});
YUI.add('yui', function(Y){}, '@VERSION@' ,{use:['yui-base','get','features','intl-base','rls','yui-log','yui-later']});<|fim▁end|> |
n.onload = function() { |
<|file_name|>test_replace.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
from datetime import datetime
import re
from pandas.compat import (zip, range, lrange, StringIO)
from pandas import (DataFrame, Series, Index, date_range, compat,
Timestamp)
import pandas as pd
from numpy import nan
import numpy as np
from pandas.util.testing import (assert_series_equal,
assert_frame_equal)
import pandas.util.testing as tm
from pandas.tests.frame.common import TestData
class TestDataFrameReplace(tm.TestCase, TestData):
_multiprocess_can_split_ = True
def test_replace_inplace(self):
self.tsframe['A'][:5] = nan
self.tsframe['A'][-5:] = nan
tsframe = self.tsframe.copy()
tsframe.replace(nan, 0, inplace=True)
assert_frame_equal(tsframe, self.tsframe.fillna(0))
self.assertRaises(TypeError, self.tsframe.replace, nan, inplace=True)
self.assertRaises(TypeError, self.tsframe.replace, nan)
# mixed type
self.mixed_frame.ix[5:20, 'foo'] = nan
self.mixed_frame.ix[-10:, 'A'] = nan
result = self.mixed_frame.replace(np.nan, 0)
expected = self.mixed_frame.fillna(value=0)
assert_frame_equal(result, expected)
tsframe = self.tsframe.copy()
tsframe.replace([nan], [0], inplace=True)
assert_frame_equal(tsframe, self.tsframe.fillna(0))
def test_regex_replace_scalar(self):
obj = {'a': list('ab..'), 'b': list('efgh')}
dfobj = DataFrame(obj)
mix = {'a': lrange(4), 'b': list('ab..')}
dfmix = DataFrame(mix)
# simplest cases
# regex -> value
# obj frame
res = dfobj.replace(r'\s*\.\s*', nan, regex=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.replace(r'\s*\.\s*', nan, regex=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True)
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True)
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.replace(re.compile(r'\s*\.\s*'), nan, regex=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.replace(re.compile(r'\s*\.\s*'), nan, regex=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1')
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1')
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
res = dfmix.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1')
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
res = dfmix.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1')
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
def test_regex_replace_scalar_inplace(self):
obj = {'a': list('ab..'), 'b': list('efgh')}
dfobj = DataFrame(obj)
mix = {'a': lrange(4), 'b': list('ab..')}
dfmix = DataFrame(mix)
# simplest cases
# regex -> value
# obj frame
res = dfobj.copy()
res.replace(r'\s*\.\s*', nan, regex=True, inplace=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.copy()
res.replace(r'\s*\.\s*', nan, regex=True, inplace=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.copy()
res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True)
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
res.replace(r'\s*(\.)\s*', r'\1\1\1', regex=True, inplace=True)
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.copy()
res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.copy()
res.replace(re.compile(r'\s*\.\s*'), nan, regex=True, inplace=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.copy()
res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True,
inplace=True)
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
res.replace(re.compile(r'\s*(\.)\s*'), r'\1\1\1', regex=True,
inplace=True)
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
res = dfobj.copy()
res.replace(regex=r'\s*\.\s*', value=nan, inplace=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.copy()
res.replace(regex=r'\s*\.\s*', value=nan, inplace=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.copy()
res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True)
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
res.replace(regex=r'\s*(\.)\s*', value=r'\1\1\1', inplace=True)
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.copy()
res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True)
assert_frame_equal(dfobj, res.fillna('.'))
# mixed
res = dfmix.copy()
res.replace(regex=re.compile(r'\s*\.\s*'), value=nan, inplace=True)
assert_frame_equal(dfmix, res.fillna('.'))
# regex -> regex
# obj frame
res = dfobj.copy()
res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1',
inplace=True)
objc = obj.copy()
objc['a'] = ['a', 'b', '...', '...']
expec = DataFrame(objc)
assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
res.replace(regex=re.compile(r'\s*(\.)\s*'), value=r'\1\1\1',
inplace=True)
mixc = mix.copy()
mixc['b'] = ['a', 'b', '...', '...']
expec = DataFrame(mixc)
assert_frame_equal(res, expec)
def test_regex_replace_list_obj(self):
obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r'\s*\.\s*', r'e|f|g']
values = [nan, 'crap']
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': ['a', 'b', nan, nan], 'b': ['crap'] * 3 +
['h'], 'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)']
values = [r'\1\1', r'\1_crap']
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e_crap',
'f_crap',
'g_crap', 'h'],
'c': ['h', 'e_crap', 'l', 'o']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r'\s*(\.)\s*', r'e']
values = [r'\1\1', r'crap']
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g',
'h'],
'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
to_replace_res = [r'\s*(\.)\s*', r'e']
values = [r'\1\1', r'crap']
res = dfobj.replace(value=values, regex=to_replace_res)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g',
'h'],
'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
def test_regex_replace_list_obj_inplace(self):
# same as above with inplace=True
# lists of regexes and values
obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r'\s*\.\s*', r'e|f|g']
values = [nan, 'crap']
res = dfobj.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': ['a', 'b', nan, nan], 'b': ['crap'] * 3 +
['h'], 'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r'\s*(\.)\s*', r'(e|f|g)']
values = [r'\1\1', r'\1_crap']
res = dfobj.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e_crap',
'f_crap',
'g_crap', 'h'],
'c': ['h', 'e_crap', 'l', 'o']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r'\s*(\.)\s*', r'e']
values = [r'\1\1', r'crap']
res = dfobj.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g',
'h'],
'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
to_replace_res = [r'\s*(\.)\s*', r'e']
values = [r'\1\1', r'crap']
res = dfobj.copy()
res.replace(value=values, regex=to_replace_res, inplace=True)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['crap', 'f', 'g',
'h'],
'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
def test_regex_replace_list_mixed(self):
# mixed frame to make sure this doesn't break things
mix = {'a': lrange(4), 'b': list('ab..')}
dfmix = DataFrame(mix)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r'\s*\.\s*', r'a']
values = [nan, 'crap']
mix2 = {'a': lrange(4), 'b': list('ab..'), 'c': list('halo')}
dfmix2 = DataFrame(mix2)
res = dfmix2.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': mix2['a'], 'b': ['crap', 'b', nan, nan],
'c': ['h', 'crap', 'l', 'o']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r'\s*(\.)\s*', r'(a|b)']
values = [r'\1\1', r'\1_crap']
res = dfmix.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['a_crap', 'b_crap', '..',
'..']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
values = [r'\1\1', r'crap', r'\1_crap']
res = dfmix.replace(to_replace_res, values, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']})
assert_frame_equal(res, expec)
to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
values = [r'\1\1', r'crap', r'\1_crap']
res = dfmix.replace(regex=to_replace_res, value=values)
expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']})
assert_frame_equal(res, expec)
def test_regex_replace_list_mixed_inplace(self):
mix = {'a': lrange(4), 'b': list('ab..')}
dfmix = DataFrame(mix)
# the same inplace
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r'\s*\.\s*', r'a']
values = [nan, 'crap']
res = dfmix.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b', nan, nan]})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r'\s*(\.)\s*', r'(a|b)']
values = [r'\1\1', r'\1_crap']
res = dfmix.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['a_crap', 'b_crap', '..',
'..']})
assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
values = [r'\1\1', r'crap', r'\1_crap']
res = dfmix.copy()
res.replace(to_replace_res, values, inplace=True, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']})
assert_frame_equal(res, expec)
to_replace_res = [r'\s*(\.)\s*', r'a', r'(b)']
values = [r'\1\1', r'crap', r'\1_crap']
res = dfmix.copy()
res.replace(regex=to_replace_res, value=values, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['crap', 'b_crap', '..', '..']})
assert_frame_equal(res, expec)
def test_regex_replace_dict_mixed(self):
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
dfmix = DataFrame(mix)
# dicts
# single dict {re1: v1}, search the whole frame
# need test for this...
# list of dicts {re1: v1, re2: v2, ..., re3: v3}, search the whole
# frame
res = dfmix.replace({'b': r'\s*\.\s*'}, {'b': nan}, regex=True)
res2 = dfmix.copy()
res2.replace({'b': r'\s*\.\s*'}, {'b': nan}, inplace=True, regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
# list of dicts {re1: re11, re2: re12, ..., reN: re1N}, search the
# whole frame
res = dfmix.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, regex=True)
res2 = dfmix.copy()
res2.replace({'b': r'\s*(\.)\s*'}, {'b': r'\1ty'}, inplace=True,
regex=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
res = dfmix.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'})
res2 = dfmix.copy()
res2.replace(regex={'b': r'\s*(\.)\s*'}, value={'b': r'\1ty'},
inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', '.ty', '.ty'], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
# scalar -> dict
# to_replace regex, {value: value}
expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'], 'c':
mix['c']})
res = dfmix.replace('a', {'b': nan}, regex=True)
res2 = dfmix.copy()
res2.replace('a', {'b': nan}, regex=True, inplace=True)
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
res = dfmix.replace('a', {'b': nan}, regex=True)
res2 = dfmix.copy()
res2.replace(regex='a', value={'b': nan}, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': [nan, 'b', '.', '.'], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
def test_regex_replace_dict_nested(self):
# nested dicts will not work until this is implemented for Series
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
dfmix = DataFrame(mix)
res = dfmix.replace({'b': {r'\s*\.\s*': nan}}, regex=True)
res2 = dfmix.copy()
res4 = dfmix.copy()
res2.replace({'b': {r'\s*\.\s*': nan}}, inplace=True, regex=True)
res3 = dfmix.replace(regex={'b': {r'\s*\.\s*': nan}})
res4.replace(regex={'b': {r'\s*\.\s*': nan}}, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
assert_frame_equal(res3, expec)
assert_frame_equal(res4, expec)
def test_regex_replace_dict_nested_gh4115(self):
df = pd.DataFrame({'Type': ['Q', 'T', 'Q', 'Q', 'T'], 'tmp': 2})
expected = DataFrame({'Type': [0, 1, 0, 0, 1], 'tmp': 2})
result = df.replace({'Type': {'Q': 0, 'T': 1}})
assert_frame_equal(result, expected)
def test_regex_replace_list_to_scalar(self):
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
df = DataFrame(mix)
expec = DataFrame({'a': mix['a'], 'b': np.array([nan] * 4),
'c': [nan, nan, nan, 'd']})
res = df.replace([r'\s*\.\s*', 'a|b'], nan, regex=True)
res2 = df.copy()
res3 = df.copy()
res2.replace([r'\s*\.\s*', 'a|b'], nan, regex=True, inplace=True)
res3.replace(regex=[r'\s*\.\s*', 'a|b'], value=nan, inplace=True)
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
assert_frame_equal(res3, expec)
def test_regex_replace_str_to_numeric(self):
# what happens when you try to replace a numeric value with a regex?
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
df = DataFrame(mix)
res = df.replace(r'\s*\.\s*', 0, regex=True)
res2 = df.copy()
res2.replace(r'\s*\.\s*', 0, inplace=True, regex=True)
res3 = df.copy()
res3.replace(regex=r'\s*\.\s*', value=0, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', 0, 0], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
assert_frame_equal(res3, expec)
def test_regex_replace_regex_list_to_numeric(self):
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
df = DataFrame(mix)
res = df.replace([r'\s*\.\s*', 'b'], 0, regex=True)
res2 = df.copy()
res2.replace([r'\s*\.\s*', 'b'], 0, regex=True, inplace=True)
res3 = df.copy()
res3.replace(regex=[r'\s*\.\s*', 'b'], value=0, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 0, 0, 0], 'c': ['a', 0,
nan,
'd']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
assert_frame_equal(res3, expec)
def test_regex_replace_series_of_regexes(self):
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
df = DataFrame(mix)
s1 = Series({'b': r'\s*\.\s*'})
s2 = Series({'b': nan})
res = df.replace(s1, s2, regex=True)
res2 = df.copy()
res2.replace(s1, s2, inplace=True, regex=True)
res3 = df.copy()
res3.replace(regex=s1, value=s2, inplace=True)
expec = DataFrame({'a': mix['a'], 'b': ['a', 'b', nan, nan], 'c':
mix['c']})
assert_frame_equal(res, expec)
assert_frame_equal(res2, expec)
assert_frame_equal(res3, expec)
def test_regex_replace_numeric_to_object_conversion(self):
mix = {'a': lrange(4), 'b': list('ab..'), 'c': ['a', 'b', nan, 'd']}
df = DataFrame(mix)
expec = DataFrame({'a': ['a', 1, 2, 3], 'b': mix['b'], 'c': mix['c']})
res = df.replace(0, 'a')
assert_frame_equal(res, expec)
self.assertEqual(res.a.dtype, np.object_)
def test_replace_regex_metachar(self):
metachars = '[]', '()', r'\d', r'\w', r'\s'
for metachar in metachars:
df = DataFrame({'a': [metachar, 'else']})
result = df.replace({'a': {metachar: 'paren'}})
expected = DataFrame({'a': ['paren', 'else']})
assert_frame_equal(result, expected)
def test_replace(self):
self.tsframe['A'][:5] = nan
self.tsframe['A'][-5:] = nan
zero_filled = self.tsframe.replace(nan, -1e8)
assert_frame_equal(zero_filled, self.tsframe.fillna(-1e8))
assert_frame_equal(zero_filled.replace(-1e8, nan), self.tsframe)
self.tsframe['A'][:5] = nan
self.tsframe['A'][-5:] = nan
self.tsframe['B'][:5] = -1e8
# empty
df = DataFrame(index=['a', 'b'])
assert_frame_equal(df, df.replace(5, 7))
# GH 11698
# test for mixed data types.
df = pd.DataFrame([('-', pd.to_datetime('20150101')),
('a', pd.to_datetime('20150102'))])
df1 = df.replace('-', np.nan)
expected_df = pd.DataFrame([(np.nan, pd.to_datetime('20150101')),
('a', pd.to_datetime('20150102'))])
assert_frame_equal(df1, expected_df)
def test_replace_list(self):
obj = {'a': list('ab..'), 'b': list('efgh'), 'c': list('helo')}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [v1, v2, ..., vN] -> [v1, v2, ..., vN]
to_replace_res = [r'.', r'e']
values = [nan, 'crap']
res = dfobj.replace(to_replace_res, values)
expec = DataFrame({'a': ['a', 'b', nan, nan],
'b': ['crap', 'f', 'g', 'h'], 'c': ['h', 'crap',
'l', 'o']})
assert_frame_equal(res, expec)
# list of [v1, v2, ..., vN] -> [v1, v2, .., vN]
to_replace_res = [r'.', r'f']
values = [r'..', r'crap']
res = dfobj.replace(to_replace_res, values)
expec = DataFrame({'a': ['a', 'b', '..', '..'], 'b': ['e', 'crap', 'g',
'h'],
'c': ['h', 'e', 'l', 'o']})
assert_frame_equal(res, expec)
def test_replace_series_dict(self):
# from GH 3064
df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0, 'b': 0}})
result = df.replace(0, {'zero': 0.5, 'one': 1.0})
expected = DataFrame(
{'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 2.0, 'b': 1.0}})
assert_frame_equal(result, expected)
result = df.replace(0, df.mean())
assert_frame_equal(result, expected)
# series to series/dict
df = DataFrame({'zero': {'a': 0.0, 'b': 1}, 'one': {'a': 2.0, 'b': 0}})
s = Series({'zero': 0.0, 'one': 2.0})
result = df.replace(s, {'zero': 0.5, 'one': 1.0})
expected = DataFrame(
{'zero': {'a': 0.5, 'b': 1}, 'one': {'a': 1.0, 'b': 0.0}})
assert_frame_equal(result, expected)
result = df.replace(s, df.mean())
assert_frame_equal(result, expected)
def test_replace_convert(self):
# gh 3907
df = DataFrame([['foo', 'bar', 'bah'], ['bar', 'foo', 'bah']])
m = {'foo': 1, 'bar': 2, 'bah': 3}
rep = df.replace(m)
expec = Series([np.int64] * 3)
res = rep.dtypes
assert_series_equal(expec, res)
def test_replace_mixed(self):
self.mixed_frame.ix[5:20, 'foo'] = nan
self.mixed_frame.ix[-10:, 'A'] = nan
result = self.mixed_frame.replace(np.nan, -18)
expected = self.mixed_frame.fillna(value=-18)
assert_frame_equal(result, expected)
assert_frame_equal(result.replace(-18, nan), self.mixed_frame)
result = self.mixed_frame.replace(np.nan, -1e8)
expected = self.mixed_frame.fillna(value=-1e8)
assert_frame_equal(result, expected)
assert_frame_equal(result.replace(-1e8, nan), self.mixed_frame)
# int block upcasting
df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
'B': Series([0, 1], dtype='int64')})
expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
'B': Series([0.5, 1], dtype='float64')})
result = df.replace(0, 0.5)
assert_frame_equal(result, expected)
df.replace(0, 0.5, inplace=True)
assert_frame_equal(df, expected)
# int block splitting
df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
'B': Series([0, 1], dtype='int64'),
'C': Series([1, 2], dtype='int64')})
expected = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
'B': Series([0.5, 1], dtype='float64'),
'C': Series([1, 2], dtype='int64')})
result = df.replace(0, 0.5)
assert_frame_equal(result, expected)
# to object block upcasting
df = DataFrame({'A': Series([1.0, 2.0], dtype='float64'),
'B': Series([0, 1], dtype='int64')})
expected = DataFrame({'A': Series([1, 'foo'], dtype='object'),
'B': Series([0, 1], dtype='int64')})
result = df.replace(2, 'foo')
assert_frame_equal(result, expected)
expected = DataFrame({'A': Series(['foo', 'bar'], dtype='object'),
'B': Series([0, 'foo'], dtype='object')})
result = df.replace([1, 2], ['foo', 'bar'])
assert_frame_equal(result, expected)
# test case from
df = DataFrame({'A': Series([3, 0], dtype='int64'),
'B': Series([0, 3], dtype='int64')})
result = df.replace(3, df.mean().to_dict())
expected = df.copy().astype('float64')
m = df.mean()
expected.iloc[0, 0] = m[0]
expected.iloc[1, 1] = m[1]
assert_frame_equal(result, expected)
def test_replace_simple_nested_dict(self):
df = DataFrame({'col': range(1, 5)})
expected = DataFrame({'col': ['a', 2, 3, 'b']})
result = df.replace({'col': {1: 'a', 4: 'b'}})
assert_frame_equal(expected, result)
# in this case, should be the same as the non-nested version
result = df.replace({1: 'a', 4: 'b'})
assert_frame_equal(expected, result)
def test_replace_simple_nested_dict_with_nonexistent_value(self):
df = DataFrame({'col': range(1, 5)})
expected = DataFrame({'col': ['a', 2, 3, 'b']})
result = df.replace({-1: '-', 1: 'a', 4: 'b'})
assert_frame_equal(expected, result)
result = df.replace({'col': {-1: '-', 1: 'a', 4: 'b'}})
assert_frame_equal(expected, result)
def test_replace_value_is_none(self):
self.assertRaises(TypeError, self.tsframe.replace, nan)
orig_value = self.tsframe.iloc[0, 0]
orig2 = self.tsframe.iloc[1, 0]
self.tsframe.iloc[0, 0] = nan
self.tsframe.iloc[1, 0] = 1
result = self.tsframe.replace(to_replace={nan: 0})
expected = self.tsframe.T.replace(to_replace={nan: 0}).T
assert_frame_equal(result, expected)
result = self.tsframe.replace(to_replace={nan: 0, 1: -1e8})
tsframe = self.tsframe.copy()
tsframe.iloc[0, 0] = 0
tsframe.iloc[1, 0] = -1e8
expected = tsframe
assert_frame_equal(expected, result)
self.tsframe.iloc[0, 0] = orig_value
self.tsframe.iloc[1, 0] = orig2
def test_replace_for_new_dtypes(self):
# dtypes
tsframe = self.tsframe.copy().astype(np.float32)
tsframe['A'][:5] = nan
tsframe['A'][-5:] = nan
zero_filled = tsframe.replace(nan, -1e8)
assert_frame_equal(zero_filled, tsframe.fillna(-1e8))
assert_frame_equal(zero_filled.replace(-1e8, nan), tsframe)
tsframe['A'][:5] = nan
tsframe['A'][-5:] = nan
tsframe['B'][:5] = -1e8
b = tsframe['B']
b[b == -1e8] = nan
tsframe['B'] = b
result = tsframe.fillna(method='bfill')
assert_frame_equal(result, tsframe.fillna(method='bfill'))
def test_replace_dtypes(self):
# int
df = DataFrame({'ints': [1, 2, 3]})
result = df.replace(1, 0)
expected = DataFrame({'ints': [0, 2, 3]})
assert_frame_equal(result, expected)
df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int32)
result = df.replace(1, 0)
expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int32)
assert_frame_equal(result, expected)
df = DataFrame({'ints': [1, 2, 3]}, dtype=np.int16)
result = df.replace(1, 0)
expected = DataFrame({'ints': [0, 2, 3]}, dtype=np.int16)
assert_frame_equal(result, expected)
# bools
df = DataFrame({'bools': [True, False, True]})
result = df.replace(False, True)
self.assertTrue(result.values.all())
# complex blocks
df = DataFrame({'complex': [1j, 2j, 3j]})
result = df.replace(1j, 0j)
expected = DataFrame({'complex': [0j, 2j, 3j]})
assert_frame_equal(result, expected)
# datetime blocks
prev = datetime.today()
now = datetime.today()
df = DataFrame({'datetime64': Index([prev, now, prev])})
result = df.replace(prev, now)
expected = DataFrame({'datetime64': Index([now] * 3)})
assert_frame_equal(result, expected)
def test_replace_input_formats(self):
# both dicts
to_rep = {'A': np.nan, 'B': 0, 'C': ''}
values = {'A': 0, 'B': -1, 'C': 'missing'}
df = DataFrame({'A': [np.nan, 0, np.inf], 'B': [0, 2, 5],
'C': ['', 'asdf', 'fd']})
filled = df.replace(to_rep, values)
expected = {}
for k, v in compat.iteritems(df):
expected[k] = v.replace(to_rep[k], values[k])
assert_frame_equal(filled, DataFrame(expected))
result = df.replace([0, 2, 5], [5, 2, 0])
expected = DataFrame({'A': [np.nan, 5, np.inf], 'B': [5, 2, 0],
'C': ['', 'asdf', 'fd']})
assert_frame_equal(result, expected)
# dict to scalar
filled = df.replace(to_rep, 0)
expected = {}
for k, v in compat.iteritems(df):
expected[k] = v.replace(to_rep[k], 0)
assert_frame_equal(filled, DataFrame(expected))
self.assertRaises(TypeError, df.replace, to_rep, [np.nan, 0, ''])
# scalar to dict
values = {'A': 0, 'B': -1, 'C': 'missing'}
df = DataFrame({'A': [np.nan, 0, np.nan], 'B': [0, 2, 5],
'C': ['', 'asdf', 'fd']})
filled = df.replace(np.nan, values)
expected = {}
for k, v in compat.iteritems(df):
expected[k] = v.replace(np.nan, values[k])
assert_frame_equal(filled, DataFrame(expected))
# list to list
to_rep = [np.nan, 0, '']
values = [-2, -1, 'missing']
result = df.replace(to_rep, values)<|fim▁hole|> expected.replace(to_rep[i], values[i], inplace=True)
assert_frame_equal(result, expected)
self.assertRaises(ValueError, df.replace, to_rep, values[1:])
# list to scalar
to_rep = [np.nan, 0, '']
result = df.replace(to_rep, -1)
expected = df.copy()
for i in range(len(to_rep)):
expected.replace(to_rep[i], -1, inplace=True)
assert_frame_equal(result, expected)
def test_replace_limit(self):
pass
def test_replace_dict_no_regex(self):
answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral', 3:
'Disagree', 4: 'Strongly Disagree'})
weights = {'Agree': 4, 'Disagree': 2, 'Neutral': 3, 'Strongly Agree':
5, 'Strongly Disagree': 1}
expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
result = answer.replace(weights)
assert_series_equal(result, expected)
def test_replace_series_no_regex(self):
answer = Series({0: 'Strongly Agree', 1: 'Agree', 2: 'Neutral', 3:
'Disagree', 4: 'Strongly Disagree'})
weights = Series({'Agree': 4, 'Disagree': 2, 'Neutral': 3,
'Strongly Agree': 5, 'Strongly Disagree': 1})
expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
result = answer.replace(weights)
assert_series_equal(result, expected)
def test_replace_dict_tuple_list_ordering_remains_the_same(self):
df = DataFrame(dict(A=[nan, 1]))
res1 = df.replace(to_replace={nan: 0, 1: -1e8})
res2 = df.replace(to_replace=(1, nan), value=[-1e8, 0])
res3 = df.replace(to_replace=[1, nan], value=[-1e8, 0])
expected = DataFrame({'A': [0, -1e8]})
assert_frame_equal(res1, res2)
assert_frame_equal(res2, res3)
assert_frame_equal(res3, expected)
def test_replace_doesnt_replace_without_regex(self):
raw = """fol T_opp T_Dir T_Enh
0 1 0 0 vo
1 2 vr 0 0
2 2 0 0 0
3 3 0 bt 0"""
df = pd.read_csv(StringIO(raw), sep=r'\s+')
res = df.replace({r'\D': 1})
assert_frame_equal(df, res)
def test_replace_bool_with_string(self):
df = DataFrame({'a': [True, False], 'b': list('ab')})
result = df.replace(True, 'a')
expected = DataFrame({'a': ['a', False], 'b': df.b})
assert_frame_equal(result, expected)
def test_replace_pure_bool_with_string_no_op(self):
df = DataFrame(np.random.rand(2, 2) > 0.5)
result = df.replace('asdf', 'fdsa')
assert_frame_equal(df, result)
def test_replace_bool_with_bool(self):
df = DataFrame(np.random.rand(2, 2) > 0.5)
result = df.replace(False, True)
expected = DataFrame(np.ones((2, 2), dtype=bool))
assert_frame_equal(result, expected)
def test_replace_with_dict_with_bool_keys(self):
df = DataFrame({0: [True, False], 1: [False, True]})
with tm.assertRaisesRegexp(TypeError, 'Cannot compare types .+'):
df.replace({'asdf': 'asdb', True: 'yes'})
def test_replace_truthy(self):
df = DataFrame({'a': [True, True]})
r = df.replace([np.inf, -np.inf], np.nan)
e = df
assert_frame_equal(r, e)
def test_replace_int_to_int_chain(self):
df = DataFrame({'a': lrange(1, 5)})
with tm.assertRaisesRegexp(ValueError, "Replacement not allowed .+"):
df.replace({'a': dict(zip(range(1, 5), range(2, 6)))})
def test_replace_str_to_str_chain(self):
a = np.arange(1, 5)
astr = a.astype(str)
bstr = np.arange(2, 6).astype(str)
df = DataFrame({'a': astr})
with tm.assertRaisesRegexp(ValueError, "Replacement not allowed .+"):
df.replace({'a': dict(zip(astr, bstr))})
def test_replace_swapping_bug(self):
df = pd.DataFrame({'a': [True, False, True]})
res = df.replace({'a': {True: 'Y', False: 'N'}})
expect = pd.DataFrame({'a': ['Y', 'N', 'Y']})
assert_frame_equal(res, expect)
df = pd.DataFrame({'a': [0, 1, 0]})
res = df.replace({'a': {0: 'Y', 1: 'N'}})
expect = pd.DataFrame({'a': ['Y', 'N', 'Y']})
assert_frame_equal(res, expect)
def test_replace_period(self):
d = {
'fname': {
'out_augmented_AUG_2011.json':
pd.Period(year=2011, month=8, freq='M'),
'out_augmented_JAN_2011.json':
pd.Period(year=2011, month=1, freq='M'),
'out_augmented_MAY_2012.json':
pd.Period(year=2012, month=5, freq='M'),
'out_augmented_SUBSIDY_WEEK.json':
pd.Period(year=2011, month=4, freq='M'),
'out_augmented_AUG_2012.json':
pd.Period(year=2012, month=8, freq='M'),
'out_augmented_MAY_2011.json':
pd.Period(year=2011, month=5, freq='M'),
'out_augmented_SEP_2013.json':
pd.Period(year=2013, month=9, freq='M')}}
df = pd.DataFrame(['out_augmented_AUG_2012.json',
'out_augmented_SEP_2013.json',
'out_augmented_SUBSIDY_WEEK.json',
'out_augmented_MAY_2012.json',
'out_augmented_MAY_2011.json',
'out_augmented_AUG_2011.json',
'out_augmented_JAN_2011.json'], columns=['fname'])
tm.assert_equal(set(df.fname.values), set(d['fname'].keys()))
expected = DataFrame({'fname': [d['fname'][k]
for k in df.fname.values]})
result = df.replace(d)
assert_frame_equal(result, expected)
def test_replace_datetime(self):
d = {'fname':
{'out_augmented_AUG_2011.json': pd.Timestamp('2011-08'),
'out_augmented_JAN_2011.json': pd.Timestamp('2011-01'),
'out_augmented_MAY_2012.json': pd.Timestamp('2012-05'),
'out_augmented_SUBSIDY_WEEK.json': pd.Timestamp('2011-04'),
'out_augmented_AUG_2012.json': pd.Timestamp('2012-08'),
'out_augmented_MAY_2011.json': pd.Timestamp('2011-05'),
'out_augmented_SEP_2013.json': pd.Timestamp('2013-09')}}
df = pd.DataFrame(['out_augmented_AUG_2012.json',
'out_augmented_SEP_2013.json',
'out_augmented_SUBSIDY_WEEK.json',
'out_augmented_MAY_2012.json',
'out_augmented_MAY_2011.json',
'out_augmented_AUG_2011.json',
'out_augmented_JAN_2011.json'], columns=['fname'])
tm.assert_equal(set(df.fname.values), set(d['fname'].keys()))
expected = DataFrame({'fname': [d['fname'][k]
for k in df.fname.values]})
result = df.replace(d)
assert_frame_equal(result, expected)
def test_replace_datetimetz(self):
# GH 11326
# behaving poorly when presented with a datetime64[ns, tz]
df = DataFrame({'A': date_range('20130101', periods=3,
tz='US/Eastern'),
'B': [0, np.nan, 2]})
result = df.replace(np.nan, 1)
expected = DataFrame({'A': date_range('20130101', periods=3,
tz='US/Eastern'),
'B': Series([0, 1, 2], dtype='float64')})
assert_frame_equal(result, expected)
result = df.fillna(1)
assert_frame_equal(result, expected)
result = df.replace(0, np.nan)
expected = DataFrame({'A': date_range('20130101', periods=3,
tz='US/Eastern'),
'B': [np.nan, np.nan, 2]})
assert_frame_equal(result, expected)
result = df.replace(Timestamp('20130102', tz='US/Eastern'),
Timestamp('20130104', tz='US/Eastern'))
expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
Timestamp('20130104', tz='US/Eastern'),
Timestamp('20130103', tz='US/Eastern')],
'B': [0, np.nan, 2]})
assert_frame_equal(result, expected)
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace(
{'A': pd.NaT}, Timestamp('20130104', tz='US/Eastern'))
assert_frame_equal(result, expected)
# coerce to object
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace(
{'A': pd.NaT}, Timestamp('20130104', tz='US/Pacific'))
expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
Timestamp('20130104', tz='US/Pacific'),
Timestamp('20130103', tz='US/Eastern')],
'B': [0, np.nan, 2]})
assert_frame_equal(result, expected)
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace({'A': np.nan}, Timestamp('20130104'))
expected = DataFrame({'A': [Timestamp('20130101', tz='US/Eastern'),
Timestamp('20130104'),
Timestamp('20130103', tz='US/Eastern')],
'B': [0, np.nan, 2]})
assert_frame_equal(result, expected)<|fim▁end|> | expected = df.copy()
for i in range(len(to_rep)): |
<|file_name|>res_company.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, _
class ResCompany(models.Model):
_inherit = "res.company"
@api.model
def create(self, vals):
new_company = super(ResCompany, self).create(vals)
ProductPricelist = self.env['product.pricelist']
pricelist = ProductPricelist.search([('currency_id', '=', new_company.currency_id.id), ('company_id', '=', False)], limit=1)
if not pricelist:
pricelist = ProductPricelist.create({
'name': new_company.name,
'currency_id': new_company.currency_id.id,
})
field_id = self.env['ir.model.fields'].search([('model', '=', 'res.partner'), ('name', '=', 'property_product_pricelist')])
self.env['ir.property'].create({
'name': 'property_product_pricelist',
'company_id': new_company.id,
'value_reference': 'product.pricelist,%s' % pricelist.id,
'fields_id': field_id.id
})<|fim▁hole|><|fim▁end|> | return new_company |
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>#[repr(C)]
#[derive(Copy)]
pub struct glob_t {
pub gl_pathc: ::size_t,
pub gl_pathv: *mut *mut ::schar_t,
pub gl_offs: ::size_t,
pub gl_flags: ::int_t,
pub gl_closedir: fn(*mut ::void_t, ),
pub gl_readdir: fn(*mut ::void_t, ) -> *mut ::void_t,
pub gl_opendir: fn(*const ::schar_t, ) -> *mut ::void_t,
pub gl_lstat: fn(*const ::schar_t, *mut ::void_t, ) -> ::int_t,
pub gl_stat: fn(*const ::schar_t, *mut ::void_t, ) -> ::int_t,
}
new!(glob_t);
pub const GLOB_APPEND: ::int_t = (1 << 5);
pub const GLOB_DOOFFS: ::int_t = (1 << 3);
pub const GLOB_ERR: ::int_t = (1 << 0);<|fim▁hole|>pub const GLOB_NOCHECK: ::int_t = (1 << 4);
pub const GLOB_NOESCAPE: ::int_t = (1 << 6);
pub const GLOB_NOSORT: ::int_t = (1 << 2);
pub const GLOB_ABORTED: ::int_t = 2;
pub const GLOB_NOMATCH: ::int_t = 3;
pub const GLOB_NOSPACE: ::int_t = 1;<|fim▁end|> | pub const GLOB_MARK: ::int_t = (1 << 1); |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var mocha = require('gulp-mocha');
var istanbul = require('gulp-istanbul');
var eslint = require('gulp-eslint');
var coveralls = require('gulp-coveralls');
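// `pre-test` instruments the library sources with istanbul so that the
// mocha run in `test` can write coverage reports.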
gulp.task('pre-test', function () {
return gulp.src(['lib/**/*.js', '!lib/micro-whalla.js', '!lib/helpers.js'])
.pipe(istanbul({ includeUntested: true }))
.pipe(istanbul.hookRequire());<|fim▁hole|> return gulp.src('test/*.test.js')
.pipe(mocha())
.pipe(istanbul.writeReports());
});
gulp.task('lint', function () {
return gulp.src(['**/*.js', '!node_modules/**', '!coverage/**'])
.pipe(eslint())
.pipe(eslint.format())
.pipe(eslint.failAfterError());
});
gulp.task('coveralls', function () {
return gulp.src('coverage/**/lcov.info')
.pipe(coveralls());
});
gulp.task('default', ['test'], function () {
});<|fim▁end|> | });
gulp.task('test', ['lint', 'pre-test'], function () { |
<|file_name|>hash_standard_resize_policy_imp.hpp<|end_file_name|><|fim▁begin|>// -*- C++ -*-
// Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// of the GNU General Public License as published by the Free Software
// Foundation; either version 3, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.
// Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL.
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file hash_standard_resize_policy_imp.hpp
* Contains a resize policy implementation.
*/
PB_DS_CLASS_T_DEC
PB_DS_CLASS_C_DEC::
hash_standard_resize_policy()
: m_size(Size_Policy::get_nearest_larger_size(1))
{ trigger_policy_base::notify_externally_resized(m_size); }
PB_DS_CLASS_T_DEC
PB_DS_CLASS_C_DEC::
hash_standard_resize_policy(const Size_Policy& r_size_policy)
: Size_Policy(r_size_policy), m_size(Size_Policy::get_nearest_larger_size(1))
{ trigger_policy_base::notify_externally_resized(m_size); }
PB_DS_CLASS_T_DEC
PB_DS_CLASS_C_DEC::
hash_standard_resize_policy(const Size_Policy& r_size_policy,
const Trigger_Policy& r_trigger_policy)
: Size_Policy(r_size_policy), Trigger_Policy(r_trigger_policy),
m_size(Size_Policy::get_nearest_larger_size(1))
{ trigger_policy_base::notify_externally_resized(m_size); }
PB_DS_CLASS_T_DEC
PB_DS_CLASS_C_DEC::
~hash_standard_resize_policy()
{ }
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
swap(PB_DS_CLASS_C_DEC& other)
{
trigger_policy_base::swap(other);
size_policy_base::swap(other);
std::swap(m_size, other.m_size);
}
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_find_search_start()
{ trigger_policy_base::notify_find_search_start(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_find_search_collision()
{ trigger_policy_base::notify_find_search_collision(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_find_search_end()
{ trigger_policy_base::notify_find_search_end(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_insert_search_start()
{ trigger_policy_base::notify_insert_search_start(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_insert_search_collision()
{ trigger_policy_base::notify_insert_search_collision(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_insert_search_end()
{ trigger_policy_base::notify_insert_search_end(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_erase_search_start()
{ trigger_policy_base::notify_erase_search_start(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_erase_search_collision()
{ trigger_policy_base::notify_erase_search_collision(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_erase_search_end()
{ trigger_policy_base::notify_erase_search_end(); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_inserted(size_type num_e)
{ trigger_policy_base::notify_inserted(num_e); }
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
notify_erased(size_type num_e)
{ trigger_policy_base::notify_erased(num_e); }
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
notify_cleared()
{ trigger_policy_base::notify_cleared(); }
PB_DS_CLASS_T_DEC
inline bool
PB_DS_CLASS_C_DEC::
is_resize_needed() const
{ return trigger_policy_base::is_resize_needed(); }
PB_DS_CLASS_T_DEC
typename PB_DS_CLASS_C_DEC::size_type
PB_DS_CLASS_C_DEC::
get_new_size(size_type size, size_type num_used_e) const
{
if (trigger_policy_base::is_grow_needed(size, num_used_e))
return size_policy_base::get_nearest_larger_size(size);
return size_policy_base::get_nearest_smaller_size(size);
}
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
notify_resized(size_type new_size)
{
trigger_policy_base::notify_resized(new_size);
m_size = new_size;
}
PB_DS_CLASS_T_DEC
inline typename PB_DS_CLASS_C_DEC::size_type
PB_DS_CLASS_C_DEC::
get_actual_size() const
{
PB_DS_STATIC_ASSERT(access, external_size_access);
return m_size;
}
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
resize(size_type new_size)
{
PB_DS_STATIC_ASSERT(access, external_size_access);
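  // Walk the size policy upward from its smallest size until it reaches
  // new_size; if the policy saturates (returns the same size) while still
  // below new_size, the request cannot be satisfied.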
size_type actual_size = size_policy_base::get_nearest_larger_size(1);
while (actual_size < new_size)
{
const size_type pot = size_policy_base::get_nearest_larger_size(actual_size);
if (pot == actual_size && pot < new_size)
__throw_resize_error();
actual_size = pot;
}
if (actual_size > 0)
--actual_size;
const size_type old_size = m_size;
__try
{
do_resize(actual_size - 1);
}
__catch(insert_error& )
{
m_size = old_size;
__throw_resize_error();
}
__catch(...)
{
m_size = old_size;
__throw_exception_again;
}
}
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
do_resize(size_type)<|fim▁hole|> // Do nothing
}
PB_DS_CLASS_T_DEC
Trigger_Policy&
PB_DS_CLASS_C_DEC::
get_trigger_policy()
{ return *this; }
PB_DS_CLASS_T_DEC
const Trigger_Policy&
PB_DS_CLASS_C_DEC::
get_trigger_policy() const
{ return *this; }
PB_DS_CLASS_T_DEC
Size_Policy&
PB_DS_CLASS_C_DEC::
get_size_policy()
{ return *this; }
PB_DS_CLASS_T_DEC
const Size_Policy&
PB_DS_CLASS_C_DEC::
get_size_policy() const
{ return *this; }<|fim▁end|> | { |
<|file_name|>strings.js<|end_file_name|><|fim▁begin|>/*global define*/<|fim▁hole|> "description": "座標グリッド オーバーレイを表示するカスタム Web AppBuilder ウィジェットです。"
});<|fim▁end|> | define({
"_widgetLabel": "グリッド オーバーレイ", |
<|file_name|>entry.py<|end_file_name|><|fim▁begin|>from mitra import db
class Entry(db.Model):
id = db.Column(db.Integer, primary_key=True)
category_name = db.Column(db.String(120), db.ForeignKey('category.name'))
userid = db.Column(db.Integer, db.ForeignKey('user.id'))
date = db.Column(db.Date)
name = db.Column(db.String(120))
amount = db.Column(db.Integer)
def __init__(self, userid, category, date, name, amount):
self.userid = userid<|fim▁hole|> self.name = name
self.amount = amount<|fim▁end|> | self.category_name = category
self.date = date |
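# --- Editor's sketch (illustrative only; values are hypothetical) ---------
# Creating and persisting an Entry; assumes an active application context
# and that a matching Category ('groceries') and User (id 1) already exist.
from datetime import date as _date
entry = Entry(1, 'groceries', _date(2016, 5, 1), 'weekly shop', 120)
db.session.add(entry)
db.session.commit()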
<|file_name|>simple_resize.py<|end_file_name|><|fim▁begin|>import logging
import pygame
from .. import Collage<|fim▁hole|>class SimpleResize(Collage):
"""
Example class for collage plugins
- Takes a single image and resizes it
"""
name = 'simple resize'
def __init__(self, config):
super(SimpleResize, self).__init__(config)
def generate(self, size):
wallpapers = self._get_wallpapers()
logging.debug('Generating...')
collage = pygame.Surface(size)
wp_offset, wp = self._resize_wallpaper(wallpapers[0], size)
collage.blit(wp, (0,0), pygame.Rect(wp_offset, size))
logging.debug('Generation complete')
return collage
def _get_wallpapers(self):
return self.wallpaper_source.pop()<|fim▁end|> | |
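# --- Editor's sketch (illustrative only) -----------------------------------
# Driving the plugin above; `config` is an assumption -- the Collage base
# class is expected to build self.wallpaper_source from it.
plugin = SimpleResize(config)
surface = plugin.generate((1920, 1080))  # returns a pygame.Surface
pygame.image.save(surface, 'collage.png')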
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod clicked_not_mango;
mod clicked_was_mango;<|fim▁hole|><|fim▁end|> | mod not_clicked_not_mango;
mod not_clicked_was_mango; |
<|file_name|>issue-2834.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test case for issue #2834.
//
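// `proto!` expands to a `streamp` module whose `init()` returns a
// (server, client) endpoint pair for the single-state `open` protocol.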
proto! streamp (
open:send<T:Send> {
data(T) -> open<T>
}
)
fn rendezvous() {
let (s, c) = streamp::init();
let streams: ~[streamp::client::open<int>] = ~[c];
<|fim▁hole|>}
pub fn main() {
//os::getenv("FOO");
rendezvous();
}<|fim▁end|> | error!("%?", streams[0]); |