prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>edittime.js<|end_file_name|><|fim▁begin|><|fim▁hole|>function GetBehaviorSettings()
{
return {
"name": "Replacer",
"id": "Rex_Replacer",
"description": "Replace instancne by fade-out itself, and create the target instance then fade-in it.",
"author": "Rex.Rainbow",
"help url": "https://dl.dropbox.com/u/5779181/C2Repo/rex_replacer.html",
"category": "Rex - Movement - opacity",
"flags": bf_onlyone
};
};
//////////////////////////////////////////////////////////////
// Conditions
AddCondition(1, cf_trigger, "On fade-out started", "Fade out", "On {my} fade-out started",
    "Triggered when fade-out started", "OnFadeOutStart");
AddCondition(2, cf_trigger, "On fade-out finished", "Fade out", "On {my} fade-out finished",
    "Triggered when fade-out finished", "OnFadeOutFinish");
// Fixed: the description previously said "fade-out started" for the fade-in trigger.
AddCondition(3, cf_trigger, "On fade-in started", "Fade in", "On {my} fade-in started",
    "Triggered when fade-in started", "OnFadeInStart");
AddCondition(4, cf_trigger, "On fade-in finished", "Fade in", "On {my} fade-in finished",
    "Triggered when fade-in finished", "OnFadeInFinish");
AddCondition(5, 0, "Is fade-out", "Fade out", "Is {my} fade-out",
    "Return true if instance is in fade-out stage", "IsFadeOut");
AddCondition(6, 0, "Is fade-in", "Fade in", "Is {my} fade-in",
    "Return true if instance is in fade-in stage", "IsFadeIn");
AddCondition(7, 0, "Is idle", "Idle", "Is {my} idle",
    "Return true if instance is in idle stage", "IsIdle");
//////////////////////////////////////////////////////////////
// Actions
AddObjectParam("Target", "Target type of replacing instance.");
AddAction(1, 0, "Replace instance", "Replace",
    "{my} Replace to {0}", "Replace instance.", "ReplaceInst");
AddStringParam("Target", "Target type in nickname of replacing instance.", '""');
// FIXME: this action reuses the runtime script name "ReplaceInst" of action 1;
// confirm the runtime dispatches both through the same function before renaming.
AddAction(2, 0, "Replace instance to nickname type", "Replace",
    "Replace {my} to nickname: <i>{0}</i>", "Replace instance to nickname type.", "ReplaceInst");
AddNumberParam("Duration", "Duration of fade-out or fade-in, in seconds.");
AddAction(3, 0, "Set duration", "Configure", "Set {my} fade duration to <i>{0}</i>",
    "Set the object's fade duration.", "SetDuration");
//////////////////////////////////////////////////////////////
// Expressions
AddExpression(1, ef_return_number, "Get UID of replacing instance", "UID", "ReplacingInstUID",
    "The UID of replacing instance, return -1 if the replacing does not start.");
// FIXME: expression 2 is an exact duplicate of expression 1 (same display name and
// expression name "ReplacingInstUID") — likely a copy-paste leftover. Kept as-is so
// saved projects referencing expression ID 2 still load; decide its real purpose.
AddExpression(2, ef_return_number, "Get UID of replacing instance", "UID", "ReplacingInstUID",
    "The UID of replacing instance, return -1 if the replacing does not start.");
ACESDone();
// Property grid properties for this plugin.
// "Fade duration" defaults to 1 (second); OnPropertyChanged clamps negatives to 0.
var property_list = [
new cr.Property(ept_float, "Fade duration", 1, "Duration of fade-out or fade-in, in seconds."),
];
// Called by IDE when a new behavior type is to be created.
// Must return an object exposing CreateInstance(); the IDE owns the result.
function CreateIDEBehaviorType()
{
return new IDEBehaviorType();
}
// Class representing a behavior type in the IDE.
// Stateless: it exists only so the IDE can ask it for per-object instances.
function IDEBehaviorType()
{
assert2(this instanceof arguments.callee, "Constructor called as a function");
}
// Called by IDE when a new behavior instance of this type is to be created.
// 'instance' is the IDE-side object this behavior is being attached to.
IDEBehaviorType.prototype.CreateInstance = function(instance)
{
return new IDEInstance(instance, this);
}
// Class representing an individual instance of an object in the IDE.
// Holds a reference to the owning IDE object plus the per-instance property values.
function IDEInstance(instance, type)
{
	assert2(this instanceof arguments.callee, "Constructor called as a function");

	// Keep references to the owning IDE object and its behavior type.
	this.instance = instance;
	this.type = type;

	// Seed every property with its default taken from the property table.
	this.properties = {};
	for (var idx = 0, len = property_list.length; idx < len; idx++)
	{
		var prop = property_list[idx];
		this.properties[prop.name] = prop.initial_value;
	}
}
// Called by the IDE after all initialization on this instance has been completed.
// This behavior needs no extra setup, so the hook is intentionally empty.
IDEInstance.prototype.OnCreate = function()
{
}
// Called by the IDE after a property has been changed
IDEInstance.prototype.OnPropertyChanged = function(property_name)
{
// Clamp values
if (this.properties["Fade duration"] < 0)
this.properties["Fade duration"] = 0;
}<|fim▁end|> | |
<|file_name|>root.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Smart pointers for the JS-managed DOM objects.
//!
//! The DOM is made up of DOM objects whose lifetime is entirely controlled by
//! the whims of the SpiderMonkey garbage collector. The types in this module
//! are designed to ensure that any interactions with said Rust types only
//! occur on values that will remain alive the entire time.
//!
//! Here is a brief overview of the important types:
//!
//! - `Root<T>`: a stack-based rooted value.
//! - `DomRoot<T>`: a stack-based reference to a rooted DOM object.
//! - `Dom<T>`: a reference to a DOM object that can automatically be traced by
//! the GC when encountered as a field of a Rust structure.
//!
//! `Dom<T>` does not allow access to their inner value without explicitly
//! creating a stack-based root via the `root` method. This returns a `DomRoot<T>`,
//! which causes the JS-owned value to be uncollectable for the duration of the
//! `Root` object's lifetime. A reference to the object can then be obtained
//! from the `Root` object. These references are not allowed to outlive their
//! originating `DomRoot<T>`.
//!
use crate::dom::bindings::conversions::DerivedFrom;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::{DomObject, Reflector};
use crate::dom::bindings::trace::trace_reflector;
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::node::Node;
use js::jsapi::{Heap, JSObject, JSTracer};
use js::rust::GCMethods;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use mitochondria::OnceCell;
use script_layout_interface::TrustedNodeAddress;
use std::cell::{Cell, UnsafeCell};
use std::default::Default;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ptr;
use std::rc::Rc;
use style::thread_state;
/// A rooted value.
#[allow(unrooted_must_root)]
#[allow_unrooted_interior]
pub struct Root<T: StableTraceObject> {
/// The value to root.
value: T,
/// List that ensures correct dynamic root ordering
root_list: *const RootCollection,
}
impl<T> Root<T>
where
    T: StableTraceObject + 'static,
{
    /// Create a new stack-bounded root for the provided value.
    /// It cannot outlive its associated `RootCollection`, and it gives
    /// out references which cannot outlive this new `Root`.
    ///
    /// Unsafe: the caller must guarantee that `value`'s trace-object
    /// address stays valid until the returned `Root` is dropped, since
    /// the `RootCollection` stores that raw pointer.
    #[allow(unrooted_must_root)]
    unsafe fn new(value: T) -> Self {
        // Rooting is only meaningful on the script thread.
        debug_assert!(thread_state::get().is_script());
        STACK_ROOTS.with(|ref root_list| {
            // `unwrap` is fine here: ThreadLocalStackRoots::new installs the
            // collection before any rooting can happen on this thread.
            let root_list = &*root_list.get().unwrap();
            root_list.root(value.stable_trace_object());
            Root { value, root_list }
        })
    }
}
/// Represents values that can be rooted through a stable address that will
/// not change for their whole lifetime.
pub unsafe trait StableTraceObject {
/// Returns a stable trace object which address won't change for the whole
/// lifetime of the value.
fn stable_trace_object(&self) -> *const dyn JSTraceable;
}
unsafe impl<T> StableTraceObject for Dom<T>
where
T: DomObject,
{
fn stable_trace_object<'a>(&'a self) -> *const dyn JSTraceable {
// The JSTraceable impl for Reflector doesn't actually do anything,
// so we need this shenanigan to actually trace the reflector of the
// T pointer in Dom<T>.
#[allow(unrooted_must_root)]
struct ReflectorStackRoot(Reflector);
unsafe impl JSTraceable for ReflectorStackRoot {
unsafe fn trace(&self, tracer: *mut JSTracer) {
trace_reflector(tracer, "on stack", &self.0);
}
}
unsafe { &*(self.reflector() as *const Reflector as *const ReflectorStackRoot) }
}
}<|fim▁hole|>where
T: Deref + StableTraceObject,
{
type Target = <T as Deref>::Target;
fn deref(&self) -> &Self::Target {
debug_assert!(thread_state::get().is_script());
&self.value
}
}
impl<T> Drop for Root<T>
where
    T: StableTraceObject,
{
    fn drop(&mut self) {
        // Unregister the stable trace object that `Root::new` handed to the
        // collection. `root_list` outlives every Root created from it, so
        // dereferencing the raw pointer here is sound.
        unsafe {
            (*self.root_list).unroot(self.value.stable_trace_object());
        }
    }
}
/// A rooted reference to a DOM object.
pub type DomRoot<T> = Root<Dom<T>>;

impl<T: Castable> DomRoot<T> {
    /// Cast a DOM object root upwards to one of the interfaces it derives from.
    pub fn upcast<U>(root: DomRoot<T>) -> DomRoot<U>
    where
        U: Castable,
        T: DerivedFrom<U>,
    {
        // Relies on Dom<T> and Dom<U> sharing the same thin-pointer
        // representation; the DerivedFrom bound makes the upcast valid.
        unsafe { mem::transmute(root) }
    }

    /// Cast a DOM object root downwards to one of the interfaces it might implement.
    /// Returns `None` when the concrete type is not a `U`.
    pub fn downcast<U>(root: DomRoot<T>) -> Option<DomRoot<U>>
    where
        U: DerivedFrom<T>,
    {
        if root.is::<U>() {
            // Type checked just above, so the transmute cannot lie about U.
            Some(unsafe { mem::transmute(root) })
        } else {
            None
        }
    }
}
impl<T: DomObject> DomRoot<T> {
/// Generate a new root from a reference
pub fn from_ref(unrooted: &T) -> DomRoot<T> {
unsafe { DomRoot::new(Dom::from_ref(unrooted)) }
}
}
impl<T> MallocSizeOf for DomRoot<T>
where
T: DomObject + MallocSizeOf,
{
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
(**self).size_of(ops)
}
}
impl<T> PartialEq for DomRoot<T>
where
T: DomObject,
{
fn eq(&self, other: &Self) -> bool {
self.value == other.value
}
}
impl<T> Clone for DomRoot<T>
where
T: DomObject,
{
fn clone(&self) -> DomRoot<T> {
DomRoot::from_ref(&*self)
}
}
unsafe impl<T> JSTraceable for DomRoot<T>
where
T: DomObject,
{
unsafe fn trace(&self, _: *mut JSTracer) {
// Already traced.
}
}
/// A rooting mechanism for reflectors on the stack.
/// LIFO is not required.
///
/// See also [*Exact Stack Rooting - Storing a GCPointer on the CStack*]
/// (https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/Internals/GC/Exact_Stack_Rooting).
pub struct RootCollection {
roots: UnsafeCell<Vec<*const dyn JSTraceable>>,
}
thread_local!(static STACK_ROOTS: Cell<Option<*const RootCollection>> = Cell::new(None));
pub struct ThreadLocalStackRoots<'a>(PhantomData<&'a u32>);
impl<'a> ThreadLocalStackRoots<'a> {
pub fn new(roots: &'a RootCollection) -> Self {
STACK_ROOTS.with(|ref r| r.set(Some(roots)));
ThreadLocalStackRoots(PhantomData)
}
}
impl<'a> Drop for ThreadLocalStackRoots<'a> {
fn drop(&mut self) {
STACK_ROOTS.with(|ref r| r.set(None));
}
}
impl RootCollection {
    /// Create an empty collection of roots
    pub fn new() -> RootCollection {
        debug_assert!(thread_state::get().is_script());
        RootCollection {
            roots: UnsafeCell::new(vec![]),
        }
    }

    /// Starts tracking a trace object.
    ///
    /// Unsafe: must only be called on the script thread, and `object`
    /// must stay valid until the matching `unroot` call.
    unsafe fn root(&self, object: *const dyn JSTraceable) {
        debug_assert!(thread_state::get().is_script());
        (*self.roots.get()).push(object);
    }

    /// Stops tracking a trace object, asserting if it isn't found.
    unsafe fn unroot(&self, object: *const dyn JSTraceable) {
        debug_assert!(thread_state::get().is_script());
        let roots = &mut *self.roots.get();
        // Roots are not required to be dropped in LIFO order (see the type
        // doc), so search from the back: recently rooted entries are the
        // most likely to be unrooted first.
        match roots.iter().rposition(|r| *r == object) {
            Some(idx) => {
                roots.remove(idx);
            },
            None => panic!("Can't remove a root that was never rooted!"),
        }
    }
}
/// SM Callback that traces the rooted reflectors
pub unsafe fn trace_roots(tracer: *mut JSTracer) {
debug!("tracing stack roots");
STACK_ROOTS.with(|ref collection| {
let collection = &*(*collection.get().unwrap()).roots.get();
for root in collection {
(**root).trace(tracer);
}
});
}
/// Get a reference out of a rooted value.
pub trait RootedReference<'root> {
/// The type of the reference.
type Ref: 'root;
/// Obtain a reference out of the rooted value.
fn r(&'root self) -> Self::Ref;
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for DomRoot<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
self
}
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for Dom<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
&self
}
}
impl<'root, T: JSTraceable + DomObject + 'root> RootedReference<'root> for [Dom<T>] {
type Ref = &'root [&'root T];
fn r(&'root self) -> &'root [&'root T] {
unsafe { mem::transmute(self) }
}
}
impl<'root, T: DomObject + 'root> RootedReference<'root> for Rc<T> {
type Ref = &'root T;
fn r(&'root self) -> &'root T {
self
}
}
impl<'root, T: RootedReference<'root> + 'root> RootedReference<'root> for Option<T> {
type Ref = Option<T::Ref>;
fn r(&'root self) -> Option<T::Ref> {
self.as_ref().map(RootedReference::r)
}
}
/// A traced reference to a DOM object
///
/// This type is critical to making garbage collection work with the DOM,
/// but it is very dangerous; if garbage collection happens with a `Dom<T>`
/// on the stack, the `Dom<T>` can point to freed memory.
///
/// This should only be used as a field in other DOM objects.
#[must_root]
pub struct Dom<T> {
ptr: ptr::NonNull<T>,
}
// Dom<T> is similar to Rc<T>, in that it's not always clear how to avoid double-counting.
// For now, we choose not to follow any such pointers.
impl<T> MallocSizeOf for Dom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
0
}
}
impl<T> Dom<T> {
/// Returns `LayoutDom<T>` containing the same pointer.
pub unsafe fn to_layout(&self) -> LayoutDom<T> {
debug_assert!(thread_state::get().is_layout());
LayoutDom {
ptr: self.ptr.clone(),
}
}
}
impl<T: DomObject> Dom<T> {
/// Create a Dom<T> from a &T
#[allow(unrooted_must_root)]
pub fn from_ref(obj: &T) -> Dom<T> {
debug_assert!(thread_state::get().is_script());
Dom {
ptr: ptr::NonNull::from(obj),
}
}
}
impl<T: DomObject> Deref for Dom<T> {
    type Target = T;
    /// Dereference to the underlying DOM object. Script-thread only.
    fn deref(&self) -> &T {
        debug_assert!(thread_state::get().is_script());
        // We can only have &Dom<T> from a rooted thing, so it's safe to deref
        // it to &T.
        unsafe { &*self.ptr.as_ptr() }
    }
}
unsafe impl<T: DomObject> JSTraceable for Dom<T> {
unsafe fn trace(&self, trc: *mut JSTracer) {
#[cfg(all(feature = "unstable", debug_assertions))]
let trace_str = format!("for {} on heap", ::std::intrinsics::type_name::<T>());
#[cfg(all(feature = "unstable", debug_assertions))]
let trace_info = &trace_str[..];
#[cfg(not(all(feature = "unstable", debug_assertions)))]
let trace_info = "for DOM object on heap";
trace_reflector(trc, trace_info, (*self.ptr.as_ptr()).reflector());
}
}
/// An unrooted reference to a DOM object for use in layout. `Layout*Helpers`
/// traits must be implemented on this.
#[allow_unrooted_interior]
pub struct LayoutDom<T> {
ptr: ptr::NonNull<T>,
}
impl<T: Castable> LayoutDom<T> {
/// Cast a DOM object root upwards to one of the interfaces it derives from.
pub fn upcast<U>(&self) -> LayoutDom<U>
where
U: Castable,
T: DerivedFrom<U>,
{
debug_assert!(thread_state::get().is_layout());
let ptr: *mut T = self.ptr.as_ptr();
LayoutDom {
ptr: unsafe { ptr::NonNull::new_unchecked(ptr as *mut U) },
}
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
pub fn downcast<U>(&self) -> Option<LayoutDom<U>>
where
U: DerivedFrom<T>,
{
debug_assert!(thread_state::get().is_layout());
unsafe {
if (*self.unsafe_get()).is::<U>() {
let ptr: *mut T = self.ptr.as_ptr();
Some(LayoutDom {
ptr: ptr::NonNull::new_unchecked(ptr as *mut U),
})
} else {
None
}
}
}
}
impl<T: DomObject> LayoutDom<T> {
/// Get the reflector.
pub unsafe fn get_jsobject(&self) -> *mut JSObject {
debug_assert!(thread_state::get().is_layout());
(*self.ptr.as_ptr()).reflector().get_jsobject().get()
}
}
impl<T> Copy for LayoutDom<T> {}
impl<T> PartialEq for Dom<T> {
fn eq(&self, other: &Dom<T>) -> bool {
self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
impl<'a, T: DomObject> PartialEq<&'a T> for Dom<T> {
fn eq(&self, other: &&'a T) -> bool {
*self == Dom::from_ref(*other)
}
}
impl<T> Eq for Dom<T> {}
impl<T> PartialEq for LayoutDom<T> {
fn eq(&self, other: &LayoutDom<T>) -> bool {
self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
impl<T> Eq for LayoutDom<T> {}
impl<T> Hash for Dom<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ptr.as_ptr().hash(state)
}
}
impl<T> Hash for LayoutDom<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ptr.as_ptr().hash(state)
}
}
impl<T> Clone for Dom<T> {
#[inline]
#[allow(unrooted_must_root)]
fn clone(&self) -> Dom<T> {
debug_assert!(thread_state::get().is_script());
Dom {
ptr: self.ptr.clone(),
}
}
}
impl<T> Clone for LayoutDom<T> {
#[inline]
fn clone(&self) -> LayoutDom<T> {
debug_assert!(thread_state::get().is_layout());
LayoutDom {
ptr: self.ptr.clone(),
}
}
}
impl LayoutDom<Node> {
/// Create a new JS-owned value wrapped from an address known to be a
/// `Node` pointer.
pub unsafe fn from_trusted_node_address(inner: TrustedNodeAddress) -> LayoutDom<Node> {
debug_assert!(thread_state::get().is_layout());
let TrustedNodeAddress(addr) = inner;
LayoutDom {
ptr: ptr::NonNull::new_unchecked(addr as *const Node as *mut Node),
}
}
}
/// A holder that provides interior mutability for GC-managed values such as
/// `Dom<T>`. Essentially a `Cell<Dom<T>>`, but safer.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
#[derive(JSTraceable)]
pub struct MutDom<T: DomObject> {
val: UnsafeCell<Dom<T>>,
}
impl<T: DomObject> MutDom<T> {
    /// Create a new `MutDom`.
    pub fn new(initial: &T) -> MutDom<T> {
        debug_assert!(thread_state::get().is_script());
        MutDom {
            val: UnsafeCell::new(Dom::from_ref(initial)),
        }
    }

    /// Set this `MutDom` to the given value.
    pub fn set(&self, val: &T) {
        debug_assert!(thread_state::get().is_script());
        unsafe {
            *self.val.get() = Dom::from_ref(val);
        }
    }

    /// Get the value in this `MutDom`.
    ///
    /// Returns a rooted copy so the caller can hold it across GC points.
    pub fn get(&self) -> DomRoot<T> {
        debug_assert!(thread_state::get().is_script());
        // ptr::read copies the Dom<T> out of the cell without moving it.
        unsafe { DomRoot::from_ref(&*ptr::read(self.val.get())) }
    }
}
impl<T: DomObject> MallocSizeOf for MutDom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
impl<T: DomObject> PartialEq for MutDom<T> {
fn eq(&self, other: &Self) -> bool {
unsafe { *self.val.get() == *other.val.get() }
}
}
impl<T: DomObject + PartialEq> PartialEq<T> for MutDom<T> {
fn eq(&self, other: &T) -> bool {
unsafe { **self.val.get() == *other }
}
}
/// A holder that provides interior mutability for GC-managed values such as
/// `Dom<T>`, with nullability represented by an enclosing Option wrapper.
/// Essentially a `Cell<Option<Dom<T>>>`, but safer.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
#[derive(JSTraceable)]
pub struct MutNullableDom<T: DomObject> {
    ptr: UnsafeCell<Option<Dom<T>>>,
}

impl<T: DomObject> MutNullableDom<T> {
    /// Create a new `MutNullableDom`.
    pub fn new(initial: Option<&T>) -> MutNullableDom<T> {
        debug_assert!(thread_state::get().is_script());
        MutNullableDom {
            ptr: UnsafeCell::new(initial.map(Dom::from_ref)),
        }
    }

    /// Retrieve a copy of the current inner value. If it is `None`, it is
    /// initialized with the result of `cb` first.
    pub fn or_init<F>(&self, cb: F) -> DomRoot<T>
    where
        F: FnOnce() -> DomRoot<T>,
    {
        debug_assert!(thread_state::get().is_script());
        match self.get() {
            Some(inner) => inner,
            None => {
                let inner = cb();
                self.set(Some(&inner));
                inner
            },
        }
    }

    /// Retrieve a copy of the inner optional `Dom<T>` as `LayoutDom<T>`.
    /// For use by layout, which can't use safe types like Temporary.
    #[allow(unrooted_must_root)]
    pub unsafe fn get_inner_as_layout(&self) -> Option<LayoutDom<T>> {
        debug_assert!(thread_state::get().is_layout());
        // ptr::read copies the Option<Dom<T>> without emptying the cell.
        ptr::read(self.ptr.get()).map(|js| js.to_layout())
    }

    /// Get a rooted value out of this object
    #[allow(unrooted_must_root)]
    pub fn get(&self) -> Option<DomRoot<T>> {
        debug_assert!(thread_state::get().is_script());
        // Root immediately so the returned value stays alive across GC points.
        unsafe { ptr::read(self.ptr.get()).map(|o| DomRoot::from_ref(&*o)) }
    }

    /// Set this `MutNullableDom` to the given value.
    pub fn set(&self, val: Option<&T>) {
        debug_assert!(thread_state::get().is_script());
        unsafe {
            *self.ptr.get() = val.map(|p| Dom::from_ref(p));
        }
    }

    /// Gets the current value out of this object and sets it to `None`.
    pub fn take(&self) -> Option<DomRoot<T>> {
        let value = self.get();
        self.set(None);
        value
    }
}
impl<T: DomObject> PartialEq for MutNullableDom<T> {
fn eq(&self, other: &Self) -> bool {
unsafe { *self.ptr.get() == *other.ptr.get() }
}
}
impl<'a, T: DomObject> PartialEq<Option<&'a T>> for MutNullableDom<T> {
fn eq(&self, other: &Option<&T>) -> bool {
unsafe { *self.ptr.get() == other.map(Dom::from_ref) }
}
}
impl<T: DomObject> Default for MutNullableDom<T> {
#[allow(unrooted_must_root)]
fn default() -> MutNullableDom<T> {
debug_assert!(thread_state::get().is_script());
MutNullableDom {
ptr: UnsafeCell::new(None),
}
}
}
impl<T: DomObject> MallocSizeOf for MutNullableDom<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
/// A holder that allows to lazily initialize the value only once
/// `Dom<T>`, using OnceCell
/// Essentially a `OnceCell<Dom<T>>`.
///
/// This should only be used as a field in other DOM objects; see warning
/// on `Dom<T>`.
#[must_root]
pub struct DomOnceCell<T: DomObject> {
ptr: OnceCell<Dom<T>>,
}
impl<T> DomOnceCell<T>
where
T: DomObject,
{
/// Retrieve a copy of the current inner value. If it is `None`, it is
/// initialized with the result of `cb` first.
#[allow(unrooted_must_root)]
pub fn init_once<F>(&self, cb: F) -> &T
where
F: FnOnce() -> DomRoot<T>,
{
debug_assert!(thread_state::get().is_script());
&self.ptr.init_once(|| Dom::from_ref(&cb()))
}
}
impl<T: DomObject> Default for DomOnceCell<T> {
#[allow(unrooted_must_root)]
fn default() -> DomOnceCell<T> {
debug_assert!(thread_state::get().is_script());
DomOnceCell {
ptr: OnceCell::new(),
}
}
}
impl<T: DomObject> MallocSizeOf for DomOnceCell<T> {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// See comment on MallocSizeOf for Dom<T>.
0
}
}
#[allow(unrooted_must_root)]
unsafe impl<T: DomObject> JSTraceable for DomOnceCell<T> {
unsafe fn trace(&self, trc: *mut JSTracer) {
if let Some(ptr) = self.ptr.as_ref() {
ptr.trace(trc);
}
}
}
impl<T: DomObject> LayoutDom<T> {
    /// Returns an unsafe pointer to the interior of this JS object. This is
    /// the only method that can be safely accessed from layout. (The fact that
    /// this is unsafe is what necessitates the layout wrappers.)
    ///
    /// The caller must not let the pointer outlive the underlying DOM object.
    pub unsafe fn unsafe_get(&self) -> *const T {
        debug_assert!(thread_state::get().is_layout());
        self.ptr.as_ptr()
    }

    /// Returns a reference to the interior of this JS object. This method is
    /// safe to call because it originates from the layout thread, and it cannot
    /// mutate DOM nodes.
    pub fn get_for_script(&self) -> &T {
        debug_assert!(thread_state::get().is_script());
        unsafe { &*self.ptr.as_ptr() }
    }
}
/// Helper trait for safer manipulations of `Option<Heap<T>>` values.
pub trait OptionalHeapSetter {
type Value;
/// Update this optional heap value with a new value.
fn set(&mut self, v: Option<Self::Value>);
}
impl<T: GCMethods + Copy> OptionalHeapSetter for Option<Heap<T>>
where
Heap<T>: Default,
{
type Value = T;
fn set(&mut self, v: Option<T>) {
let v = match v {
None => {
*self = None;
return;
},
Some(v) => v,
};
if self.is_none() {
*self = Some(Heap::default());
}
self.as_ref().unwrap().set(v);
}
}<|fim▁end|> |
impl<T> Deref for Root<T> |
<|file_name|>hexView.py<|end_file_name|><|fim▁begin|># DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009 ArxSys
#
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Jeremy Mounier <[email protected]>
#
from modules.viewer.hexedit.hexItem import *
from modules.viewer.hexedit.offsetItem import *
from modules.viewer.hexedit.asciiItem import *
from modules.viewer.hexedit.scrollbar import hexScrollBar
from PyQt4.QtCore import Qt, QLineF
from PyQt4.QtGui import QGraphicsView, QKeySequence, QHBoxLayout, QWidget, QFont, QGraphicsScene, QGraphicsLineItem, QGraphicsTextItem
class wHex(QWidget):
def __init__(self, parent):
QWidget.__init__(self)
self.init(parent)
self.initShape()
self.initMode()
def init(self, parent):
self.heditor = parent
def initShape(self):
self.hbox = QHBoxLayout()
self.hbox.setContentsMargins(0, 0, 0, 0)
self.view = hexView(self)
self.scroll = hexScrollBar(self)<|fim▁hole|> self.hexitem = hexItem(self)
self.offsetitem = offsetItem(self)
self.asciitem = asciiItem(self)
self.hexcursor = hexCursor(self)
self.asciicursor = asciiCursor(self)
self.view.setItems()
self.view.setCursors()
self.hbox.addWidget(self.view)
self.hbox.addWidget(self.scroll)
self.setLayout(self.hbox)
    #Set Long File Mode
    def initMode(self):
        """Choose between normal scrolling and Long File Mode (LFMOD).

        Qt scrollbars hold 32-bit signed values, so a file whose line count
        exceeds maxint is scrolled through a scaled [0, maxint) range
        instead of one scrollbar step per line.
        """
        self.lfmod = False
        # Largest value a 32-bit signed scrollbar can represent (2**31 - 1).
        self.maxint = 2147483647
        self.lines = self.heditor.filesize / self.heditor.bytesPerLine
        if self.isInt(self.lines):
            self.scroll.max = self.lines - 1
        else:
            # Too many lines for the scrollbar: switch to scaled offsets.
            self.lfmod = True
            self.scroll.max = self.maxint - 1
        self.scroll.setValues()

    def offsetToValue(self, offset):
        """Map a byte offset to a scrollbar value (scaled when in LFMOD)."""
        if self.isLFMOD():
            # print (self.maxint * offset) / self.heditor.filesize
            return ((self.maxint * offset) / self.heditor.filesize)
        else:
            return (offset / self.heditor.bytesPerLine)

    def isLFMOD(self):
        # True when the scrollbar range is scaled rather than one-per-line.
        return self.lfmod
def isInt(self, val):
try:
res = int(val)
if res < 2147483647:
return True
else:
return False
except ValueError, TypeError:
return False
else:
return False
class hexView(QGraphicsView):
def __init__(self, parent):
QGraphicsView.__init__(self, None, parent)
self.init(parent)
self.initShape()
def init(self, parent):
self.whex = parent
self.heditor = self.whex.heditor
#Init scene
self.__scene = QGraphicsScene(self)
self.setScene(self.__scene)
#Get heditor stuff
self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.setAlignment(Qt.AlignLeft)
def setItems(self):
self.__scene.addItem(self.whex.offsetitem)
self.__scene.addItem(self.whex.hexitem)
self.__scene.addItem(self.whex.asciitem)
def initShape(self):
self.initHeads()
#Line decoration
offsetLine = QGraphicsLineItem(QLineF(90, 0, 90, 700))
asciiLine = QGraphicsLineItem(QLineF(480, 0, 480, 700))
#Add to scene
self.__scene.addItem(offsetLine)
self.__scene.addItem(asciiLine)
def setCursors(self):
self.__scene.addItem(self.whex.hexcursor)
self.__scene.addItem(self.whex.asciicursor)
def initHeads(self):
self.offHead = QGraphicsTextItem()
self.hexHead = QGraphicsTextItem()
self.asciiHead = QGraphicsTextItem()
#Set Color
self.offHead.setDefaultTextColor(QColor(Qt.red))
self.hexHead.setDefaultTextColor(QColor(Qt.black))
self.asciiHead.setDefaultTextColor(QColor(Qt.darkCyan))
#Create Font
self.font = QFont("Gothic")
self.font.setFixedPitch(1)
self.font.setBold(False)
self.font.setPixelSize(14)
#Set Font
self.offHead.setFont(self.font)
self.hexHead.setFont(self.font)
self.asciiHead.setFont(self.font)
#Set Text
self.offHead.setPlainText("Offset")
self.hexHead.setPlainText("0 1 2 3 4 5 6 7 8 9 A B C D E F")
self.asciiHead.setPlainText("Ascii")
#Position
self.offHead.setPos(20, 0)
self.hexHead.setPos(95, 0)
self.asciiHead.setPos(520, 0)
#Add to scene
self.__scene.addItem(self.offHead)
self.__scene.addItem(self.hexHead)
self.__scene.addItem(self.asciiHead)
headLine = QGraphicsLineItem(QLineF(0, 20, 615, 20))
self.__scene.addItem(headLine)
    def move(self, step, way):
        """Scroll the hex view by `step` lines and sync the scrollbar.

        step -- number of lines (1 for a line, pagesize/bytesPerLine for a
                page, 3 for a wheel notch)
        way  -- 0 scrolls up (towards offset 0), 1 scrolls down
        """
        #step: line = 1 * bytesPerLine, page = pagesize, wheel = 3 * bytesPerLine
        offset = self.heditor.currentOffset
        if way == 0:
            #UP
            if (offset - (step * self.heditor.bytesPerLine)) >= 0:
                self.heditor.readOffset(offset - (step * self.heditor.bytesPerLine))
                if self.whex.isLFMOD():
                    # Scaled mode: recompute the scrollbar value from the offset.
                    self.whex.scroll.setValue(self.whex.offsetToValue(offset - step * (self.heditor.bytesPerLine)))
                else:
                    self.whex.scroll.setValue(self.whex.scroll.value() - step)
            else:
                # Clamp at the start of the file.
                self.heditor.readOffset(0)
                self.whex.scroll.setValue(0)
        elif way == 1:
            #Down
            if (offset + (step * self.heditor.bytesPerLine)) <= (self.heditor.filesize - (step * self.heditor.bytesPerLine)):
                self.heditor.readOffset(offset + (step * self.heditor.bytesPerLine))
                if self.whex.isLFMOD():
                    self.whex.scroll.setValue(self.whex.offsetToValue(offset + step * (self.heditor.bytesPerLine)))
                else:
                    self.whex.scroll.setValue(self.whex.scroll.value() + step)
            else:
                # Clamp near the end of the file: show the last few lines.
                self.heditor.readOffset(self.heditor.filesize - 5 * (self.heditor.bytesPerLine))
                self.whex.scroll.setValue(self.whex.scroll.max)
    ####################################
    #       Navigation Operations      #
    ####################################
    def wheelEvent(self, event):
        """Scroll three lines per wheel notch; the delta sign gives direction."""
        # NOTE(review): `offset` is read but never used in this handler.
        offset = self.heditor.currentOffset
        if event.delta() > 0:
            self.move(3, 0)
        else:
            self.move(3, 1)
def keyPressEvent(self, keyEvent):
# off = self.heditor.currentOffset
if keyEvent.matches(QKeySequence.MoveToNextPage):
self.move(self.heditor.pageSize / self.heditor.bytesPerLine, 1)
elif keyEvent.matches(QKeySequence.MoveToPreviousPage):
self.move(self.heditor.pageSize / self.heditor.bytesPerLine, 0)
elif keyEvent.matches(QKeySequence.MoveToNextWord):
print "Next Word"
elif keyEvent.matches(QKeySequence.MoveToPreviousWord):
print "Previous word"
elif keyEvent.matches(QKeySequence.MoveToNextLine):
print "Next Line"
elif keyEvent.matches(QKeySequence.MoveToPreviousLine):
print "Previous Line"<|fim▁end|> |
#Init Items |
<|file_name|>cluster.rs<|end_file_name|><|fim▁begin|>//! This modules contains an implementation of [r2d2](https://github.com/sfackler/r2d2)<|fim▁hole|>use std::iter::Iterator;
use query::QueryBuilder;
use client::{CDRS, Session};
use error::{Error as CError, Result as CResult};
use authenticators::Authenticator;
use compression::Compression;
use r2d2;
use transport::CDRSTransport;
use rand;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Load balancing strategy
#[derive(PartialEq)]
pub enum LoadBalancingStrategy {
    /// Round Robin balancing strategy: nodes are picked in order, wrapping around.
    RoundRobin,
    /// Random balancing strategy: a node is picked pseudo-randomly on each call.
    Random,
}
impl LoadBalancingStrategy {
    /// Returns the next node for the selected load balancing strategy.
    ///
    /// `i` is the caller-maintained round-robin cursor; it is ignored by the
    /// `Random` strategy. Returns `None` when `nodes` is empty.
    pub fn next<'a, N>(&'a self, nodes: &'a Vec<N>, i: usize) -> Option<&N> {
        // Guard the empty pool up front: `rnd_idx` would otherwise evaluate
        // `rnd % 0` and panic with a divide-by-zero for the Random strategy.
        if nodes.is_empty() {
            return None;
        }
        match *self {
            LoadBalancingStrategy::Random => nodes.get(self.rnd_idx((0, Some(nodes.len())))),
            LoadBalancingStrategy::RoundRobin => {
                let mut cycle = nodes.iter().cycle().skip(i);
                cycle.next()
            }
        }
    }

    /// Returns a pseudo-random index in `[bounds.0, bounds.1)`.
    /// When the upper bound is `None`, `u8::max_value()` is used as a cap.
    /// Not cryptographically secure; only used to spread load.
    fn rnd_idx(&self, bounds: (usize, Option<usize>)) -> usize {
        let min = bounds.0;
        let max = bounds.1.unwrap_or(u8::max_value() as usize);
        let rnd = rand::random::<usize>();
        rnd % (max - min) + min
    }
}
/// Load balancer
///
/// Picks nodes out of a fixed pool according to the configured
/// `LoadBalancingStrategy`.
///
/// #Example
///
/// ```no_run
/// use cdrs::cluster::{LoadBalancingStrategy, LoadBalancer};
/// use cdrs::transport::TransportTcp;
/// let transports = vec![TransportTcp::new("127.0.0.1:9042"), TransportTcp::new("127.0.0.1:9042")];
/// let load_balancer = LoadBalancer::new(transports, LoadBalancingStrategy::RoundRobin);
/// let node = load_balancer.next().unwrap();
/// ```
pub struct LoadBalancer<T> {
    /// Strategy used to pick the next node.
    strategy: LoadBalancingStrategy,
    /// Fixed pool of nodes to balance across.
    nodes: Vec<T>,
    /// Round-robin cursor; shared across callers without locking.
    i: AtomicUsize,
}

impl<T> LoadBalancer<T> {
    /// Factory function which creates new `LoadBalancer` with provided strategy.
    pub fn new(nodes: Vec<T>, strategy: LoadBalancingStrategy) -> LoadBalancer<T> {
        LoadBalancer {
            nodes: nodes,
            strategy: strategy,
            i: AtomicUsize::new(0),
        }
    }

    /// Returns next node basing on provided strategy.
    pub fn next(&self) -> Option<&T> {
        let next = self.strategy
            .next(&self.nodes, self.i.load(Ordering::Relaxed) as usize);
        if self.strategy == LoadBalancingStrategy::RoundRobin {
            self.i.fetch_add(1, Ordering::Relaxed);
            // prevent overflow: fold the cursor back into [0, nodes.len()).
            // NOTE(review): the load/rem/store below is not a single atomic
            // step, so two racing callers can briefly skew the cursor; that
            // only perturbs the rotation order, but worth confirming it is
            // acceptable.
            let i = self.i.load(Ordering::Relaxed);
            match i.checked_rem(self.nodes.len() as usize) {
                Some(rem) => self.i.store(rem, Ordering::Relaxed),
                // checked_rem is None only when the node list is empty.
                None => return None,
            }
        }
        next
    }
}
/// [r2d2](https://github.com/sfackler/r2d2) `ManageConnection`.
pub struct ClusterConnectionManager<T, X> {
    /// Picks which node each new connection goes to.
    load_balancer: LoadBalancer<X>,
    /// Authenticator cloned into every new session.
    authenticator: T,
    /// Compression negotiated when a session is started.
    compression: Compression,
}

impl<T, X> ClusterConnectionManager<T, X>
    where T: Authenticator + Send + Sync + 'static
{
    /// Creates a new instance of `ConnectionManager`.
    /// It requires transport, authenticator and compression as inputs.
    pub fn new(load_balancer: LoadBalancer<X>,
               authenticator: T,
               compression: Compression)
               -> ClusterConnectionManager<T, X> {
        ClusterConnectionManager {
            load_balancer: load_balancer,
            authenticator: authenticator,
            compression: compression,
        }
    }
}
impl<T: Authenticator + Send + Sync + 'static,
     X: CDRSTransport + Send + Sync + 'static> r2d2::ManageConnection
    for ClusterConnectionManager<T, X> {
    type Connection = Session<T, X>;
    type Error = CError;

    /// Opens a new session against the next node chosen by the balancer.
    fn connect(&self) -> Result<Self::Connection, Self::Error> {
        let transport_res: CResult<X> = self.load_balancer
            .next()
            .ok_or_else(|| "Cannot get next node".into())
            // Each pooled connection needs its own transport handle.
            .and_then(|x| x.try_clone().map_err(|e| e.into()));
        let transport = try!(transport_res);
        let compression = self.compression;
        let cdrs = CDRS::new(transport, self.authenticator.clone());
        cdrs.start(compression)
    }

    /// Health check: a cheap system-table query proves the link is alive.
    fn is_valid(&self, connection: &mut Self::Connection) -> Result<(), Self::Error> {
        let query = QueryBuilder::new("SELECT * FROM system.peers;").finalize();
        connection.query(query, false, false).map(|_| ())
    }

    /// Fast-path probe for r2d2; always reports healthy and defers to `is_valid`.
    fn has_broken(&self, _connection: &mut Self::Connection) -> bool {
        false
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // RoundRobin must cycle through the nodes in order, wrapping at the
    // pool size.
    #[test]
    fn round_robin() {
        let nodes = vec!["a", "b", "c"];
        let nodes_c = nodes.clone();
        let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::RoundRobin);
        for i in 0..10 {
            assert_eq!(&nodes_c[i % 3], load_balancer.next().unwrap());
        }
    }
    // Random must always yield some node from a non-empty pool; the exact
    // node is unspecified, so only `is_some` is asserted.
    #[test]
    fn lb_random() {
        let nodes = vec!["a", "b", "c", "d", "e", "f", "g"];
        let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::Random);
        for _ in 0..100 {
            let s = load_balancer.next();
            assert!(s.is_some());
        }
    }
}<|fim▁end|> | //! functionality of connection pools. To get more details about creating r2d2 pools
//! please refer to original documentation. |
<|file_name|>convert.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::borrow::Cow;
use std::fmt::Display;
use std::{self, char, i16, i32, i64, i8, str, u16, u32, u64, u8};
// use crate::{self, FieldTypeTp, UNSPECIFIED_LENGTH};
use crate::{Collation, FieldTypeAccessor};
use crate::{FieldTypeTp, UNSPECIFIED_LENGTH};
use tipb::FieldType;
use super::mysql::{RoundMode, DEFAULT_FSP};
use super::{Error, Result};
use crate::codec::data_type::*;
use crate::codec::error::ERR_DATA_OUT_OF_RANGE;
use crate::codec::mysql::decimal::max_or_min_dec;
use crate::codec::mysql::{charset, Res};
use crate::expr::EvalContext;
use crate::expr::Flag;
/// A trait for converting a value to an `Int`.
pub trait ToInt {
    /// Converts the given value to an `i64`
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64>;
    /// Converts the given value to an `u64`
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64>;
}
/// A trait for converting a value to `T`
pub trait ConvertTo<T> {
    /// Converts the given value to `T` value
    fn convert(&self, ctx: &mut EvalContext) -> Result<T>;
}
/// The inverse direction of `ConvertTo`: builds `Self` out of a `T`.
pub trait ConvertFrom<T>: Sized {
    /// Converts the given value from `T` value
    fn convert_from(ctx: &mut EvalContext, from: T) -> Result<Self>;
}
// Every `ConvertTo` implementation automatically provides the
// `ConvertFrom` direction, so only one side needs a manual impl.
impl<V, W: ConvertTo<V>> ConvertFrom<W> for V {
    fn convert_from(ctx: &mut EvalContext, from: W) -> Result<Self> {
        from.convert(ctx)
    }
}
// Any `ToInt` type converts to i64 with BIGINT (LongLong) bounds.
impl<T> ConvertTo<i64> for T
where
    T: ToInt,
{
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<i64> {
        self.to_int(ctx, FieldTypeTp::LongLong)
    }
}
// Any `ToInt` type converts to u64 with BIGINT UNSIGNED (LongLong) bounds.
impl<T> ConvertTo<u64> for T
where
    T: ToInt,
{
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<u64> {
        self.to_uint(ctx, FieldTypeTp::LongLong)
    }
}
// Converts through f64 first, then checks the result is a finite `Real`.
impl<T> ConvertTo<Real> for T
where
    T: ConvertTo<f64> + EvaluableRet,
{
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<Real> {
        let val = self.convert(ctx)?;
        let val = box_try!(Real::new(val));
        Ok(val)
    }
}
impl<T> ConvertTo<String> for T
where
    T: ToString + EvaluableRet,
{
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<String> {
        // FIXME: There is an additional step `ProduceStrWithSpecifiedTp` in TiDB.
        Ok(self.to_string())
    }
}
impl<T> ConvertTo<Bytes> for T
where
    T: ToString + EvaluableRet,
{
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<Bytes> {
        Ok(self.to_string().into_bytes())
    }
}
// `JsonRef` gets its own copies of the conversions above because it does
// not satisfy the `EvaluableRet` bound of the blanket impls.
impl<'a> ConvertTo<Real> for JsonRef<'a> {
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<Real> {
        let val = self.convert(ctx)?;
        let val = box_try!(Real::new(val));
        Ok(val)
    }
}
impl<'a> ConvertTo<String> for JsonRef<'a> {
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<String> {
        // FIXME: There is an additional step `ProduceStrWithSpecifiedTp` in TiDB.
        Ok(self.to_string())
    }
}
impl<'a> ConvertTo<Bytes> for JsonRef<'a> {
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<Bytes> {
        Ok(self.to_string().into_bytes())
    }
}
/// Returns the max u64 values of different mysql types
///
/// # Panics
///
/// Panics if the `tp` is not one of `FieldTypeTp::Tiny`, `FieldTypeTp::Short`,
/// `FieldTypeTp::Int24`, `FieldTypeTp::Long`, `FieldTypeTp::LongLong`,
/// `FieldTypeTp::Bit`, `FieldTypeTp::Set`, `FieldTypeTp::Enum`
#[inline]
pub fn integer_unsigned_upper_bound(tp: FieldTypeTp) -> u64 {
    match tp {
        FieldTypeTp::Tiny => u64::from(u8::MAX),
        FieldTypeTp::Short => u64::from(u16::MAX),
        // Int24 is MySQL's 3-byte MEDIUMINT: 2^24 - 1 when unsigned.
        FieldTypeTp::Int24 => (1 << 24) - 1,
        FieldTypeTp::Long => u64::from(u32::MAX),
        FieldTypeTp::LongLong | FieldTypeTp::Bit | FieldTypeTp::Set | FieldTypeTp::Enum => u64::MAX,
        _ => panic!("input bytes is not a mysql type: {}", tp),
    }
}
/// Returns the max i64 values of different mysql types
///
/// # Panics
///
/// Panics if the `tp` is not one of `FieldTypeTp::Tiny`, `FieldTypeTp::Short`,
/// `FieldTypeTp::Int24`, `FieldTypeTp::Long`, `FieldTypeTp::LongLong`,
#[inline]
pub fn integer_signed_upper_bound(tp: FieldTypeTp) -> i64 {
    match tp {
        FieldTypeTp::Tiny => i64::from(i8::MAX),
        FieldTypeTp::Short => i64::from(i16::MAX),
        // Signed MEDIUMINT keeps one bit for the sign: 2^23 - 1.
        FieldTypeTp::Int24 => (1 << 23) - 1,
        FieldTypeTp::Long => i64::from(i32::MAX),
        FieldTypeTp::LongLong => i64::MAX,
        _ => panic!("input bytes is not a mysql type: {}", tp),
    }
}
/// Returns the min i64 values of different mysql types
///
/// # Panics
///
/// Panics if the `tp` is not one of `FieldTypeTp::Tiny`, `FieldTypeTp::Short`,
/// `FieldTypeTp::Int24`, `FieldTypeTp::Long`, `FieldTypeTp::LongLong`,
#[inline]
pub fn integer_signed_lower_bound(tp: FieldTypeTp) -> i64 {
    match tp {
        FieldTypeTp::Tiny => i64::from(i8::MIN),
        FieldTypeTp::Short => i64::from(i16::MIN),
        // -(2^23) for signed MEDIUMINT.
        FieldTypeTp::Int24 => -1i64 << 23,
        FieldTypeTp::Long => i64::from(i32::MIN),
        FieldTypeTp::LongLong => i64::MIN,
        _ => panic!("input bytes is not a mysql type: {}", tp),
    }
}
/// `truncate_binary` shortens `s` in place to at most `flen` bytes.
/// A `flen` of `UNSPECIFIED_LENGTH` means "no limit" and leaves the
/// buffer untouched.
#[inline]
pub fn truncate_binary(s: &mut Vec<u8>, flen: isize) {
    if flen == crate::UNSPECIFIED_LENGTH as isize {
        return;
    }
    let limit = flen as usize;
    if s.len() > limit {
        s.truncate(limit);
    }
}
/// `truncate_f64` (`TruncateFloat` in TiDB) tries to truncate f.
/// If the result exceeds the max/min float that flen/decimal
/// allowed, returns the max/min float allowed.
///
/// NOTE(review): assumes `flen >= decimal`; otherwise `flen - decimal`
/// underflows the u8 subtraction — confirm callers validate this.
pub fn truncate_f64(mut f: f64, flen: u8, decimal: u8) -> Res<f64> {
    if f.is_nan() {
        return Res::Overflow(0f64);
    }
    // `shift` scales the fraction into the integer range for rounding;
    // `max_f` is the largest value representable with this width/scale.
    let shift = 10f64.powi(i32::from(decimal));
    let max_f = 10f64.powi(i32::from(flen - decimal)) - 1.0 / shift;
    if f.is_finite() {
        let tmp = f * shift;
        if tmp.is_finite() {
            f = tmp.round() / shift
        }
    }
    if f > max_f {
        return Res::Overflow(max_f);
    }
    if f < -max_f {
        return Res::Overflow(-max_f);
    }
    Res::Ok(f)
}
/// Returns an overflowed error.
#[inline]
fn overflow(val: impl Display, bound: FieldTypeTp) -> Error {
    Error::Eval(
        format!("constant {} overflows {}", val, bound),
        ERR_DATA_OUT_OF_RANGE,
    )
}
impl ToInt for i64 {
    // Saturates out-of-range values to the bound of `tp`; `ctx` decides
    // whether the overflow is a hard error or only a warning.
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        let lower_bound = integer_signed_lower_bound(tp);
        // https://dev.mysql.com/doc/refman/8.0/en/out-of-range-and-overflow.html
        if *self < lower_bound {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(lower_bound);
        }
        let upper_bound = integer_signed_upper_bound(tp);
        if *self > upper_bound {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(upper_bound);
        }
        Ok(*self)
    }
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        // When `ctx` requests clipping, negative values become 0 instead
        // of being reinterpreted as large unsigned values below.
        if *self < 0 && ctx.should_clip_to_zero() {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(0);
        }
        let upper_bound = integer_unsigned_upper_bound(tp);
        if *self as u64 > upper_bound {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(upper_bound);
        }
        Ok(*self as u64)
    }
}
impl ToInt for u64 {
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        // Signed upper bounds are non-negative, so the `as u64` cast is safe.
        let upper_bound = integer_signed_upper_bound(tp);
        if *self > upper_bound as u64 {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(upper_bound);
        }
        Ok(*self as i64)
    }
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let upper_bound = integer_unsigned_upper_bound(tp);
        if *self > upper_bound {
            ctx.handle_overflow_err(overflow(self, tp))?;
            return Ok(upper_bound);
        }
        Ok(*self)
    }
}
impl ToInt for f64 {
    /// This function is ported from TiDB's types.ConvertFloatToInt,
    /// which checks whether the number overflows the signed lower and upper boundaries of `tp`
    ///
    /// # Notes
    ///
    /// It handles overflows using `ctx` so that the caller would not handle it anymore.
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        #![allow(clippy::float_cmp)]
        // Round half away from zero first, then clamp to the type bounds.
        let val = (*self).round();
        let lower_bound = integer_signed_lower_bound(tp);
        if val < lower_bound as f64 {
            ctx.handle_overflow_err(overflow(val, tp))?;
            return Ok(lower_bound);
        }
        let upper_bound = integer_signed_upper_bound(tp);
        let ub_f64 = upper_bound as f64;
        // according to https://github.com/pingcap/tidb/pull/5247
        // equality with the (rounded-up) f64 image of the bound is treated
        // as exactly the bound, not as overflow.
        if val >= ub_f64 {
            if val == ub_f64 {
                return Ok(upper_bound);
            } else {
                ctx.handle_overflow_err(overflow(val, tp))?;
                return Ok(upper_bound);
            }
        }
        Ok(val as i64)
    }
    /// This function is ported from TiDB's types.ConvertFloatToUint,
    /// which checks whether the number overflows the unsigned upper boundaries of `tp`
    ///
    /// # Notes
    ///
    /// It handles overflows using `ctx` so that the caller would not handle it anymore.
    #[allow(clippy::float_cmp)]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let val = (*self).round();
        if val < 0f64 {
            ctx.handle_overflow_err(overflow(val, tp))?;
            if ctx.should_clip_to_zero() {
                return Ok(0);
            } else {
                // recall that, `f64 as u64` is different from `f64 as i64 as u64`
                return Ok(val as i64 as u64);
            }
        }
        let upper_bound = integer_unsigned_upper_bound(tp);
        if val > upper_bound as f64 {
            ctx.handle_overflow_err(overflow(val, tp))?;
            Ok(upper_bound)
        } else if val == upper_bound as f64 {
            // Because u64::MAX can not be represented precisely in iee754(64bit),
            // so u64::MAX as f64 will make a num bigger than u64::MAX,
            // which can not be represented by 64bit integer.
            // So (u64::MAX as f64) as u64 is undefined behavior.
            Ok(upper_bound)
        } else {
            Ok(val as u64)
        }
    }
}
// `Real` is a finite-f64 newtype; conversion simply unwraps and delegates.
impl ToInt for Real {
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        self.into_inner().to_int(ctx, tp)
    }
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        self.into_inner().to_uint(ctx, tp)
    }
}
impl ToInt for &[u8] {
    /// Port from TiDB's types.StrToInt
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        // Keep only the valid-UTF-8, whitespace-trimmed, numeric prefix.
        let s = get_valid_utf8_prefix(ctx, self)?;
        let s = s.trim();
        let vs = get_valid_int_prefix(ctx, s)?;
        let val = vs.parse::<i64>();
        match val {
            Ok(val) => val.to_int(ctx, tp),
            Err(_) => {
                ctx.handle_overflow_err(Error::overflow("BIGINT", &vs))?;
                // To make compatible with TiDB,
                // return signed upper bound or lower bound when overflow.
                // see TiDB's `types.StrToInt` and [strconv.ParseInt](https://golang.org/pkg/strconv/#ParseInt)
                let val = if vs.starts_with('-') {
                    integer_signed_lower_bound(tp)
                } else {
                    integer_signed_upper_bound(tp)
                };
                Ok(val)
            }
        }
    }
    /// Port from TiDB's types.StrToUint
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let s = get_valid_utf8_prefix(ctx, self)?;
        let s = s.trim();
        let s = get_valid_int_prefix(ctx, s)?;
        // in TiDB, it use strconv.ParseUint here,
        // strconv.ParseUint will return 0 and a err if the str is neg
        if s.starts_with('-') {
            ctx.handle_overflow_err(Error::overflow("BIGINT UNSIGNED", s))?;
            return Ok(0);
        }
        let val = s.parse::<u64>();
        match val {
            Ok(val) => val.to_uint(ctx, tp),
            Err(_) => {
                ctx.handle_overflow_err(Error::overflow("BIGINT UNSIGNED", s))?;
                // To make compatible with TiDB,
                // return `integer_unsigned_upper_bound(tp);` when overflow.
                // see TiDB's `types.StrToUint` and [strconv.ParseUint](https://golang.org/pkg/strconv/#ParseUint)
                let val = integer_unsigned_upper_bound(tp);
                Ok(val)
            }
        }
    }
}
// Byte-container wrappers just delegate to the `&[u8]` implementation.
impl ToInt for std::borrow::Cow<'_, [u8]> {
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        self.as_ref().to_int(ctx, tp)
    }
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        self.as_ref().to_uint(ctx, tp)
    }
}
impl ToInt for Bytes {
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        self.as_slice().to_int(ctx, tp)
    }
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        self.as_slice().to_uint(ctx, tp)
    }
}
impl ToInt for Decimal {
    // Rounds to an integer first, then clamps to the bounds of `tp`.
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        let dec = round_decimal_with_ctx(ctx, *self)?;
        let val = dec.as_i64();
        let err = Error::truncated_wrong_val("DECIMAL", &dec);
        let r = val.into_result_with_overflow_err(ctx, err)?;
        r.to_int(ctx, tp)
    }
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let dec = round_decimal_with_ctx(ctx, *self)?;
        let val = dec.as_u64();
        let err = Error::truncated_wrong_val("DECIMAL", &dec);
        let r = val.into_result_with_overflow_err(ctx, err)?;
        r.to_uint(ctx, tp)
    }
}
impl ToInt for DateTime {
    // FiXME
    //  Time::parse_utc_datetime("2000-01-01T12:13:14.6666", 4).unwrap().round_frac(DEFAULT_FSP)
    //  will get 2000-01-01T12:13:14, this is a bug
    // Drops the fractional seconds, converts to a numeric Decimal
    // (yyyymmddHHMMSS), then clamps to the bounds of `tp`.
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        let t = self.round_frac(ctx, DEFAULT_FSP)?;
        let dec: Decimal = t.convert(ctx)?;
        let val = dec.as_i64();
        let val = val.into_result(ctx)?;
        val.to_int(ctx, tp)
    }
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let t = self.round_frac(ctx, DEFAULT_FSP)?;
        let dec: Decimal = t.convert(ctx)?;
        decimal_as_u64(ctx, dec, tp)
    }
}
impl ToInt for Duration {
    // Same shape as the DateTime conversion: round fsp, go through Decimal.
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        let dur = (*self).round_frac(DEFAULT_FSP)?;
        let dec: Decimal = dur.convert(ctx)?;
        let val = dec.as_i64_with_ctx(ctx)?;
        val.to_int(ctx, tp)
    }
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let dur = (*self).round_frac(DEFAULT_FSP)?;
        let dec: Decimal = dur.convert(ctx)?;
        decimal_as_u64(ctx, dec, tp)
    }
}
// Owned Json delegates to the borrowed `JsonRef` implementation below.
impl ToInt for Json {
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        self.as_ref().to_int(ctx, tp)
    }
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        self.as_ref().to_uint(ctx, tp)
    }
}
impl<'a> ToInt for JsonRef<'a> {
    // Port from TiDB's types.ConvertJSONToInt
    #[inline]
    fn to_int(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<i64> {
        // Casts json to int has different behavior in TiDB/MySQL when the json
        // value is a `Json::from_f64` and we will keep compatible with TiDB
        // **Note**: select cast(cast('4.5' as json) as signed)
        // TiDB: 5
        // MySQL: 4
        let val = match self.get_type() {
            // Containers cannot become integers; report truncation and use 0.
            JsonType::Object | JsonType::Array => Ok(ctx
                .handle_truncate_err(Error::truncated_wrong_val("Integer", self.to_string()))
                .map(|_| 0)?),
            // true/false/null map onto 1/0/0 via the bool-as-i64 cast.
            JsonType::Literal => Ok(self.get_literal().map_or(0, |x| x as i64)),
            JsonType::I64 => Ok(self.get_i64()),
            JsonType::U64 => Ok(self.get_u64() as i64),
            JsonType::Double => self.get_double().to_int(ctx, tp),
            JsonType::String => self.get_str_bytes()?.to_int(ctx, tp),
        }?;
        // Final clamp into the bounds of `tp`.
        val.to_int(ctx, tp)
    }
    // Port from TiDB's types.ConvertJSONToInt
    #[inline]
    fn to_uint(&self, ctx: &mut EvalContext, tp: FieldTypeTp) -> Result<u64> {
        let val = match self.get_type() {
            JsonType::Object | JsonType::Array => Ok(ctx
                .handle_truncate_err(Error::truncated_wrong_val("Integer", self.to_string()))
                .map(|_| 0)?),
            JsonType::Literal => Ok(self.get_literal().map_or(0, |x| x as u64)),
            JsonType::I64 => Ok(self.get_i64() as u64),
            JsonType::U64 => Ok(self.get_u64()),
            JsonType::Double => self.get_double().to_uint(ctx, tp),
            JsonType::String => self.get_str_bytes()?.to_uint(ctx, tp),
        }?;
        val.to_uint(ctx, tp)
    }
}
#[inline]
pub fn get_valid_utf8_prefix<'a>(ctx: &mut EvalContext, bytes: &'a [u8]) -> Result<&'a str> {
let valid = match str::from_utf8(bytes) {
Ok(s) => s,
Err(err) => {
ctx.handle_truncate(true)?;
let (valid, _) = bytes.split_at(err.valid_up_to());
unsafe { str::from_utf8_unchecked(valid) }
}<|fim▁hole|> Ok(valid)
}
/// Rounds `dec` to 0 fractional digits with half-even rounding, mapping a
/// rounding overflow into a DECIMAL overflow error handled via `ctx`.
fn round_decimal_with_ctx(ctx: &mut EvalContext, dec: Decimal) -> Result<Decimal> {
    dec.round(0, RoundMode::HalfEven)
        .into_result_with_overflow_err(ctx, Error::overflow("DECIMAL", ""))
}
/// Converts `dec` to u64 (overflow routed through `ctx`), then clamps the
/// result into the unsigned range of `tp`.
#[inline]
fn decimal_as_u64(ctx: &mut EvalContext, dec: Decimal, tp: FieldTypeTp) -> Result<u64> {
    dec.as_u64()
        .into_result_with_overflow_err(ctx, Error::overflow("DECIMAL", dec))?
        .to_uint(ctx, tp)
}
/// `bytes_to_int_without_context` converts a byte arrays to an i64
/// in best effort, but without context.
pub fn bytes_to_int_without_context(bytes: &[u8]) -> Result<i64> {
// trim
let mut trimed = bytes.iter().skip_while(|&&b| b == b' ' || b == b'\t');
let mut negative = false;
let mut r = Some(0i64);
if let Some(&c) = trimed.next() {
if c == b'-' {
negative = true;
} else if c >= b'0' && c <= b'9' {
r = Some(i64::from(c) - i64::from(b'0'));
} else if c != b'+' {
return Ok(0);
}
for c in trimed.take_while(|&&c| c >= b'0' && c <= b'9') {
let cur = i64::from(*c - b'0');
r = r.and_then(|r| r.checked_mul(10)).and_then(|r| {
if negative {
r.checked_sub(cur)
} else {
r.checked_add(cur)
}
});
if r.is_none() {
break;
}
}
}
r.ok_or_else(|| Error::overflow("BIGINT", ""))
}
/// `bytes_to_uint_without_context` converts a byte arrays to an iu64
/// in best effort, but without context.
pub fn bytes_to_uint_without_context(bytes: &[u8]) -> Result<u64> {
// trim
let mut trimed = bytes.iter().skip_while(|&&b| b == b' ' || b == b'\t');
let mut r = Some(0u64);
if let Some(&c) = trimed.next() {
if c >= b'0' && c <= b'9' {
r = Some(u64::from(c) - u64::from(b'0'));
} else if c != b'+' {
return Ok(0);
}
for c in trimed.take_while(|&&c| c >= b'0' && c <= b'9') {
r = r
.and_then(|r| r.checked_mul(10))
.and_then(|r| r.checked_add(u64::from(*c - b'0')));
if r.is_none() {
break;
}
}
}
r.ok_or_else(|| Error::overflow("BIGINT UNSIGNED", ""))
}
/// Fits `dec` into the `(flen, decimal)` width/scale declared by `ft`
/// (TiDB's `ProduceDecWithSpecifiedTp`): saturates when the integer part is
/// too wide, rounds when the scale differs, and clips negative values of
/// unsigned columns to zero.
pub fn produce_dec_with_specified_tp(
    ctx: &mut EvalContext,
    mut dec: Decimal,
    ft: &FieldType,
) -> Result<Decimal> {
    let (flen, decimal) = (ft.as_accessor().flen(), ft.as_accessor().decimal());
    if flen != UNSPECIFIED_LENGTH && decimal != UNSPECIFIED_LENGTH {
        if flen < decimal {
            return Err(Error::m_bigger_than_d(""))
        }
        let (prec, frac) = dec.prec_and_frac();
        let (prec, frac) = (prec as isize, frac as isize);
        // Integer part wider than the column allows: saturate to the
        // max/min decimal of this width (sign-preserving).
        if !dec.is_zero() && prec - frac > flen - decimal {
            // select (cast 111 as decimal(1)) causes a warning in MySQL.
            ctx.handle_overflow_err(Error::overflow(
                "Decimal",
                &format!("({}, {})", flen, decimal),
            ))?;
            dec = max_or_min_dec(dec.is_negative(), flen as u8, decimal as u8)
        } else if frac != decimal {
            // Scale mismatch: round to the declared number of fraction digits.
            let old = dec;
            let rounded = dec
                .round(decimal as i8, RoundMode::HalfEven)
                .into_result_with_overflow_err(
                    ctx,
                    Error::overflow("Decimal", &format!("({}, {})", flen, decimal)),
                )?;
            // Only report truncation when rounding actually changed the value.
            if !rounded.is_zero() && frac > decimal && rounded != old {
                if ctx.cfg.flag.contains(Flag::IN_INSERT_STMT)
                    || ctx.cfg.flag.contains(Flag::IN_UPDATE_OR_DELETE_STMT)
                {
                    ctx.warnings.append_warning(Error::truncated());
                } else {
                    // although according to tidb,
                    // we should handler overflow after handle_truncate,
                    // however, no overflow err will return by handle_truncate
                    ctx.handle_truncate(true)?;
                }
            }
            dec = rounded
        }
    };
    if ft.is_unsigned() && dec.is_negative() {
        Ok(Decimal::zero())
    } else {
        Ok(dec)
    }
}
/// `produce_float_with_specified_tp`(`ProduceFloatWithSpecifiedTp` in TiDB) produces
/// a new float64 according to `flen` and `decimal` in `self.tp`.
/// TODO port tests from TiDB(TiDB haven't implemented now)
pub fn produce_float_with_specified_tp(
    ctx: &mut EvalContext,
    tp: &FieldType,
    num: f64,
) -> Result<f64> {
    let flen = tp.as_accessor().flen();
    let decimal = tp.as_accessor().decimal();
    let ul = crate::UNSPECIFIED_LENGTH;
    // Truncation only applies when both width and scale are specified.
    let res = if flen != ul && decimal != ul {
        assert!(flen < std::u8::MAX as isize && decimal < std::u8::MAX as isize);
        let r = truncate_f64(num, flen as u8, decimal as u8);
        r.into_result_with_overflow_err(ctx, Error::overflow(num, "DOUBLE"))?
    } else {
        num
    };
    // Unsigned columns clip negative values to 0 after reporting overflow.
    if tp.is_unsigned() && res < 0f64 {
        ctx.handle_overflow_err(overflow(res, tp.as_accessor().tp()))?;
        return Ok(0f64);
    }
    Ok(res)
}
/// `produce_str_with_specified_tp`(`ProduceStrWithSpecifiedTp` in TiDB) produces
/// a new string according to `flen` and `chs`.
///
/// For UTF-8 charsets `flen` counts characters; for other charsets it counts
/// bytes. Over-long input is truncated (reported through `ctx`); short binary
/// `CHAR` values are optionally zero-padded.
///
/// # Panics
///
/// The s must represent a valid str, otherwise, panic!
pub fn produce_str_with_specified_tp<'a>(
    ctx: &mut EvalContext,
    s: Cow<'a, [u8]>,
    ft: &FieldType,
    pad_zero: bool,
) -> Result<Cow<'a, [u8]>> {
    let (flen, chs) = (ft.flen(), ft.get_charset());
    // Negative flen means "unspecified": pass the value through unchanged.
    if flen < 0 {
        return Ok(s);
    }
    let flen = flen as usize;
    // flen is the char length, not byte length, for UTF8 charset, we need to calculate the
    // char count and truncate to flen chars if it is too long.
    if chs == charset::CHARSET_UTF8 || chs == charset::CHARSET_UTF8MB4 {
        let truncate_info = {
            // In TiDB's version, the param `s` is a string,
            // so we can unwrap directly here because we need the `s` represent a valid str
            let s: &str = std::str::from_utf8(s.as_ref()).unwrap();
            // Find the byte offset of the (flen+1)-th char, if any; also
            // count the total chars for the warning message.
            let mut indices = s.char_indices().skip(flen);
            indices.next().map(|(truncate_pos, _)| {
                let char_count = flen + 1 + indices.count();
                (char_count, truncate_pos)
            })
        };
        if truncate_info.is_none() {
            return Ok(s);
        }
        let (char_count, truncate_pos) = truncate_info.unwrap();
        ctx.handle_truncate_err(Error::data_too_long(format!(
            "Data Too Long, field len {}, data len {}",
            flen, char_count
        )))?;
        let mut res = s.into_owned();
        truncate_binary(&mut res, truncate_pos as isize);
        Ok(Cow::Owned(res))
    } else if s.len() > flen {
        // Non-UTF8 charsets: flen is a byte limit.
        ctx.handle_truncate_err(Error::data_too_long(format!(
            "Data Too Long, field len {}, data len {}",
            flen,
            s.len()
        )))?;
        let mut res = s.into_owned();
        truncate_binary(&mut res, flen as isize);
        Ok(Cow::Owned(res))
    } else if ft.as_accessor().tp() == FieldTypeTp::String
        && s.len() < flen
        && ft.is_binary_string_like()
        && pad_zero
    {
        // Binary CHAR(n): right-pad with NUL bytes up to the column width.
        let mut s = s.into_owned();
        s.resize(flen, 0);
        Ok(Cow::Owned(s))
    } else {
        Ok(s)
    }
}
/// Right-pads `s` with zero bytes up to `ft.flen()` when the column is a
/// binary-collated `CHAR` (`FieldTypeTp::String`); other types are untouched.
pub fn pad_zero_for_binary_type(s: &mut Vec<u8>, ft: &FieldType) {
    let flen = ft.flen();
    // Negative flen means "unspecified width": nothing to pad to.
    if flen < 0 {
        return;
    }
    let flen = flen as usize;
    if ft.as_accessor().tp() == FieldTypeTp::String
        && ft
            .as_accessor()
            .collation()
            .map(|col| col == Collation::Binary)
            .unwrap_or(false)
        && s.len() < flen
    {
        // it seems MaxAllowedPacket has not push down to tikv, so we needn't to handle it
        s.resize(flen, 0);
    }
}
// Integer-to-double conversions are plain casts (lossy above 2^53).
impl ConvertTo<f64> for i64 {
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<f64> {
        Ok(*self as f64)
    }
}
impl ConvertTo<f64> for u64 {
    #[inline]
    fn convert(&self, _: &mut EvalContext) -> Result<f64> {
        Ok(*self as f64)
    }
}
impl ConvertTo<f64> for &[u8] {
    /// This function parse the str to float,
    /// if the num represent by the str is too large,
    /// it will handle truncated using ctx,
    /// and return f64::MIN or f64::MAX according to whether isNeg of the str
    ///
    /// Port from TiDB's types.StrToFloat
    fn convert(&self, ctx: &mut EvalContext) -> Result<f64> {
        let s = str::from_utf8(self)?.trim();
        let vs = get_valid_float_prefix(ctx, s)?;
        let val = vs
            .parse::<f64>()
            .map_err(|err| -> Error { box_err!("Parse '{}' to float err: {:?}", vs, err) })?;
        // The `parse` will return Ok(inf) if the float string literal out of range
        if val.is_infinite() {
            ctx.handle_truncate_err(Error::truncated_wrong_val("DOUBLE", &vs))?;
            if val.is_sign_negative() {
                return Ok(std::f64::MIN);
            } else {
                return Ok(std::f64::MAX);
            }
        }
        Ok(val)
    }
}
// Byte-container wrappers delegate to the `&[u8]` implementation above.
impl ConvertTo<f64> for std::borrow::Cow<'_, [u8]> {
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<f64> {
        self.as_ref().convert(ctx)
    }
}
impl ConvertTo<f64> for Bytes {
    #[inline]
    fn convert(&self, ctx: &mut EvalContext) -> Result<f64> {
        self.as_slice().convert(ctx)
    }
}
/// Extracts the longest prefix of `s` usable as an integer literal.
///
/// Outside a `SELECT` statement the value is derived from the valid float
/// prefix (so `"1.5e2"` becomes `"150"`); inside a `SELECT` only a leading
/// `[+/-]digits` run is kept. Whenever part of `s` must be dropped, a
/// truncation condition is routed through `ctx` (error or warning depending
/// on the flags).
pub fn get_valid_int_prefix<'a>(ctx: &mut EvalContext, s: &'a str) -> Result<Cow<'a, str>> {
    if !ctx.cfg.flag.contains(Flag::IN_SELECT_STMT) {
        let vs = get_valid_float_prefix(ctx, s)?;
        float_str_to_int_string(ctx, vs)
    } else {
        let mut valid_len = 0;
        for (i, c) in s.chars().enumerate() {
            // A sign is only allowed in the first position.
            if (c == '+' || c == '-') && i == 0 {
                continue;
            }
            if c.is_ascii_digit() {
                valid_len = i + 1;
                continue;
            }
            break;
        }
        let mut valid = &s[..valid_len];
        if valid.is_empty() {
            valid = "0";
        }
        if valid_len == 0 || valid_len < s.len() {
            ctx.handle_truncate_err(Error::truncated_wrong_val("INTEGER", s))?;
        }
        Ok(Cow::Borrowed(valid))
    }
}
/// Returns the longest prefix of `s` that forms a valid float literal
/// (sign, digits, at most one dot, at most one exponent). If anything has
/// to be dropped, a truncation condition is routed through `ctx`; an empty
/// prefix yields `"0"`.
pub fn get_valid_float_prefix<'a>(ctx: &mut EvalContext, s: &'a str) -> Result<&'a str> {
    // saw_dot/saw_digit/e_idx track which float components have appeared so
    // far; valid_len is the length of the longest well-formed prefix.
    let mut saw_dot = false;
    let mut saw_digit = false;
    let mut valid_len = 0;
    let mut e_idx = 0;
    for (i, c) in s.chars().enumerate() {
        if c == '+' || c == '-' {
            if i != 0 && (e_idx == 0 || i != e_idx + 1) {
                // "1e+1" is valid.
                break;
            }
        } else if c == '.' {
            if saw_dot || e_idx > 0 {
                // "1.1." or "1e1.1"
                break;
            }
            saw_dot = true;
            if saw_digit {
                // "123." is valid.
                valid_len = i + 1;
            }
        } else if c == 'e' || c == 'E' {
            if !saw_digit {
                // "+.e"
                break;
            }
            if e_idx != 0 {
                // "1e5e"
                break;
            }
            e_idx = i
        } else if c < '0' || c > '9' {
            break;
        } else {
            saw_digit = true;
            valid_len = i + 1;
        }
    }
    if valid_len == 0 || valid_len < s.len() {
        ctx.handle_truncate_err(Error::truncated_wrong_val("INTEGER", s))?;
    }
    if valid_len == 0 {
        Ok("0")
    } else {
        Ok(&s[..valid_len])
    }
}
/// Rounds the valid integer string `s` (optionally signed) by one decimal
/// step when the digit after the dot, `num_next_dot`, is `'5'` or greater.
/// Carries propagate through trailing `9`s; an all-`9` magnitude grows by
/// one digit (e.g. `"99"` -> `"100"`, `"-99"` -> `"-100"`).
///
/// the `s` must be a valid int_str
fn round_int_str(num_next_dot: char, s: &str) -> Cow<'_, str> {
    // Digits below '5' round down: the string is unchanged.
    if num_next_dot < '5' {
        return Cow::Borrowed(s);
    }
    let mut rounded = String::with_capacity(s.len() + 1);
    // Rightmost character that is not a '9' (and not a sign) absorbs the
    // carry; everything to its right becomes '0'.
    if let Some(pos) = s.rfind(|c| c != '9' && c != '+' && c != '-') {
        rounded.push_str(&s[..pos]);
        // `s` is a valid int string, so bumping the ASCII digit is safe.
        rounded.push((s.as_bytes()[pos] + 1) as char);
        for _ in (pos + 1)..s.len() {
            rounded.push('0');
        }
    } else {
        // Every digit was '9': keep the sign, then "1" followed by zeros.
        let zero_count = if s.starts_with('+') || s.starts_with('-') {
            rounded.push_str(&s[..1]);
            s.len() - 1
        } else {
            s.len()
        };
        rounded.push('1');
        for _ in 0..zero_count {
            rounded.push('0');
        }
    }
    Cow::Owned(rounded)
}
/// It converts a valid float string into valid integer string which can be
/// parsed by `i64::from_str`, we can't parse float first then convert it to string
/// because precision will be lost.
///
/// When the float string indicating a value that is overflowing the i64,
/// the original float string is returned and an overflow warning is attached.
///
/// This func will find serious overflow such as the len of result > 20 (without prefix `+/-`)
/// however, it will not check whether the result overflow BIGINT.
fn float_str_to_int_string<'a>(
    ctx: &mut EvalContext,
    valid_float: &'a str,
) -> Result<Cow<'a, str>> {
    // this func is complex, to make it same as TiDB's version,
    // we impl it like TiDB's version(https://github.com/pingcap/tidb/blob/9b521342bf/types/convert.go#L400)
    // Locate the (at most one) dot and exponent marker in a single pass.
    let mut dot_idx = None;
    let mut e_idx = None;
    for (i, c) in valid_float.chars().enumerate() {
        match c {
            '.' => dot_idx = Some(i),
            'e' | 'E' => e_idx = Some(i),
            _ => (),
        }
    }
    // Dispatch on which components are present: plain integer, fixed-point,
    // or scientific notation.
    match (dot_idx, e_idx) {
        (None, None) => Ok(Cow::Borrowed(valid_float)),
        (Some(di), None) => no_exp_float_str_to_int_str(valid_float, di),
        (_, Some(ei)) => exp_float_str_to_int_str(ctx, valid_float, ei, dot_idx),
    }
}
/// Converts a float string in scientific notation (`digits[.digits]e[+/-]exp`)
/// into an integer string by shifting the decimal point `exp` places.
/// Returns the original string (plus an overflow warning) when the shifted
/// integer part clearly cannot fit in 64 bits.
fn exp_float_str_to_int_str<'a>(
    ctx: &mut EvalContext,
    valid_float: &'a str,
    e_idx: usize,
    dot_idx: Option<usize>,
) -> Result<Cow<'a, str>> {
    // int_cnt and digits contain the prefix `+/-` if valid_float[0] is `+/-`
    let mut digits: Vec<u8> = Vec::with_capacity(valid_float.len());
    let int_cnt: i64;
    match dot_idx {
        None => {
            digits.extend_from_slice(&valid_float[..e_idx].as_bytes());
            // if digits.len() > i64::MAX,
            // then the input str has at least 9223372036854775808 chars,
            // which make the str >= 8388608.0 TB,
            // so cast it to i64 is safe.
            int_cnt = digits.len() as i64;
        }
        Some(dot_idx) => {
            // Collect all digits (dot removed); int_cnt counts those before the dot.
            digits.extend_from_slice(&valid_float[..dot_idx].as_bytes());
            int_cnt = digits.len() as i64;
            digits.extend_from_slice(&valid_float[(dot_idx + 1)..e_idx].as_bytes());
        }
    }
    // make `digits` immutable
    let digits = digits;
    let exp = match valid_float[(e_idx + 1)..].parse::<i64>() {
        Ok(exp) => exp,
        _ => return Ok(Cow::Borrowed(valid_float)),
    };
    // Applying the exponent moves the decimal point: int_cnt now counts the
    // digits that end up left of it.
    let (int_cnt, is_overflow): (i64, bool) = int_cnt.overflowing_add(exp);
    if int_cnt > 21 || is_overflow {
        // MaxInt64 has 19 decimal digits.
        // MaxUint64 has 20 decimal digits.
        // And the intCnt may contain the len of `+/-`,
        // so here we use 21 here as the early detection.
        ctx.warnings
            .append_warning(Error::overflow("BIGINT", &valid_float));
        return Ok(Cow::Borrowed(valid_float));
    }
    if int_cnt <= 0 {
        // Value is in (-1, 1): result is "0", possibly rounded up to "1"/"−1".
        let int_str = "0";
        if int_cnt == 0 && !digits.is_empty() && digits[0].is_ascii_digit() {
            return Ok(round_int_str(digits[0] as char, int_str));
        } else {
            return Ok(Cow::Borrowed(int_str));
        }
    }
    if int_cnt == 1 && (digits[0] == b'-' || digits[0] == b'+') {
        // Only the sign sits left of the point; round on the first digit and
        // normalize a signed zero to plain "0".
        let int_str = match digits[0] {
            b'+' => "+0",
            b'-' => "-0",
            _ => "0",
        };
        let res = if digits.len() > 1 {
            round_int_str(digits[1] as char, int_str)
        } else {
            Cow::Borrowed(int_str)
        };
        let tmp = &res.as_bytes()[0..2];
        if tmp == b"+0" || tmp == b"-0" {
            return Ok(Cow::Borrowed("0"));
        } else {
            return Ok(res);
        }
    }
    let int_cnt = int_cnt as usize;
    if int_cnt <= digits.len() {
        // Some digits fall right of the point: take the prefix, rounding on
        // the first dropped digit.
        let int_str = String::from_utf8_lossy(&digits[..int_cnt]);
        if int_cnt < digits.len() {
            Ok(Cow::Owned(
                round_int_str(digits[int_cnt] as char, &int_str).into_owned(),
            ))
        } else {
            Ok(Cow::Owned(int_str.into_owned()))
        }
    } else {
        // Exponent exceeds the digit count: pad with trailing zeros.
        let mut res = String::with_capacity(int_cnt);
        for d in digits.iter() {
            res.push(*d as char);
        }
        for _ in digits.len()..int_cnt {
            res.push('0');
        }
        Ok(Cow::Owned(res))
    }
}
/// Converts a fixed-point float string (no exponent) into an integer string
/// by truncating at the dot and rounding on the first fractional digit.
/// `dot_idx` is the dot's index within `valid_float`.
fn no_exp_float_str_to_int_str(valid_float: &str, mut dot_idx: usize) -> Result<Cow<'_, str>> {
    // According to TiDB's impl
    // 1. If there is digit after dot, round.
    // 2. Only when the final result <0, add '-' in the front of it.
    // 3. The result has no '+'.
    // `digits` is the sign-stripped view; dot_idx is shifted to match it.
    let digits = if valid_float.starts_with('+') || valid_float.starts_with('-') {
        dot_idx -= 1;
        &valid_float[1..]
    } else {
        valid_float
    };
    // TODO: may here we can use Cow to avoid some copy below
    let int_str = if valid_float.starts_with('-') {
        if dot_idx == 0 {
            "-0"
        } else {
            // the valid_float[0] is '-', so there is `dot_idx-=1` above,
            // so we need valid_float[..(dot_idx+1)] here.
            &valid_float[..=dot_idx]
        }
    } else if dot_idx == 0 {
        "0"
    } else {
        &digits[..dot_idx]
    };
    // Round using the first digit after the dot, if there is one.
    let res = if digits.len() > dot_idx + 1 {
        round_int_str(digits.as_bytes()[dot_idx + 1] as char, int_str)
    } else {
        Cow::Borrowed(int_str)
    };
    // in the TiDB version, after round, except '0',
    // others(even if `00`) will be prefix with `-` if valid_float[0]=='-'.
    // so we need to remove `-` of `-0`.
    let res_bytes = res.as_bytes();
    if res_bytes == b"-0" {
        Ok(Cow::Owned(String::from(&res[1..])))
    } else {
        Ok(res)
    }
}
#[cfg(test)]
mod tests {
#![allow(clippy::float_cmp)]
use std::fmt::Debug;
use std::sync::Arc;
use std::{f64, i64, isize, u64};
use crate::codec::error::{
ERR_DATA_OUT_OF_RANGE, ERR_M_BIGGER_THAN_D, ERR_TRUNCATE_WRONG_VALUE, WARN_DATA_TRUNCATED,
};
use crate::codec::mysql::{Res, UNSPECIFIED_FSP};
use crate::expr::{EvalConfig, EvalContext, Flag};
use crate::{Collation, FieldTypeFlag};
use super::*;
#[test]
fn test_int_to_int() {
    // Table-driven check of i64 -> signed-integer-type conversion.
    // A `None` expectation means the cast must report overflow.
    let cases: Vec<(i64, FieldTypeTp, Option<i64>)> = vec![
        (123, FieldTypeTp::Tiny, Some(123)),
        (-123, FieldTypeTp::Tiny, Some(-123)),
        (256, FieldTypeTp::Tiny, None),
        (-257, FieldTypeTp::Tiny, None),
        (123, FieldTypeTp::Short, Some(123)),
        (-123, FieldTypeTp::Short, Some(-123)),
        (65536, FieldTypeTp::Short, None),
        (-65537, FieldTypeTp::Short, None),
        (123, FieldTypeTp::Int24, Some(123)),
        (-123, FieldTypeTp::Int24, Some(-123)),
        (8388610, FieldTypeTp::Int24, None),
        (-8388610, FieldTypeTp::Int24, None),
        (8388610, FieldTypeTp::Long, Some(8388610)),
        (-8388610, FieldTypeTp::Long, Some(-8388610)),
        (4294967297, FieldTypeTp::Long, None),
        (-4294967297, FieldTypeTp::Long, None),
        (8388610, FieldTypeTp::LongLong, Some(8388610)),
        (-8388610, FieldTypeTp::LongLong, Some(-8388610)),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_int(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_uint_into_int() {
    // u64 -> signed-integer-type conversion; `None` marks expected overflow
    // (e.g. u64::MAX does not fit in i64).
    let cases: Vec<(u64, FieldTypeTp, Option<i64>)> = vec![
        (123, FieldTypeTp::Tiny, Some(123)),
        (256, FieldTypeTp::Tiny, None),
        (123, FieldTypeTp::Short, Some(123)),
        (65536, FieldTypeTp::Short, None),
        (123, FieldTypeTp::Int24, Some(123)),
        (8388610, FieldTypeTp::Int24, None),
        (8388610, FieldTypeTp::Long, Some(8388610)),
        (4294967297, FieldTypeTp::Long, None),
        (4294967297, FieldTypeTp::LongLong, Some(4294967297)),
        (u64::MAX, FieldTypeTp::LongLong, None),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_int(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_float_to_int() {
    // f64 -> signed-integer-type conversion. Fractions round half away
    // from zero (123.6 -> 124, -123.6 -> -124); `None` marks overflow.
    let cases: Vec<(f64, FieldTypeTp, Option<i64>)> = vec![
        (123.1, FieldTypeTp::Tiny, Some(123)),
        (123.6, FieldTypeTp::Tiny, Some(124)),
        (-123.1, FieldTypeTp::Tiny, Some(-123)),
        (-123.6, FieldTypeTp::Tiny, Some(-124)),
        (256.5, FieldTypeTp::Tiny, None),
        (256.1, FieldTypeTp::Short, Some(256)),
        (256.6, FieldTypeTp::Short, Some(257)),
        (-256.1, FieldTypeTp::Short, Some(-256)),
        (-256.6, FieldTypeTp::Short, Some(-257)),
        (65535.5, FieldTypeTp::Short, None),
        (65536.1, FieldTypeTp::Int24, Some(65536)),
        (65536.5, FieldTypeTp::Int24, Some(65537)),
        (-65536.1, FieldTypeTp::Int24, Some(-65536)),
        (-65536.5, FieldTypeTp::Int24, Some(-65537)),
        (8388610.2, FieldTypeTp::Int24, None),
        (8388610.4, FieldTypeTp::Long, Some(8388610)),
        (8388610.5, FieldTypeTp::Long, Some(8388611)),
        (-8388610.4, FieldTypeTp::Long, Some(-8388610)),
        (-8388610.5, FieldTypeTp::Long, Some(-8388611)),
        (4294967296.8, FieldTypeTp::Long, None),
        (4294967296.8, FieldTypeTp::LongLong, Some(4294967297)),
        (4294967297.1, FieldTypeTp::LongLong, Some(4294967297)),
        (-4294967296.8, FieldTypeTp::LongLong, Some(-4294967297)),
        (-4294967297.1, FieldTypeTp::LongLong, Some(-4294967297)),
        (f64::MAX, FieldTypeTp::LongLong, None),
        (f64::MIN, FieldTypeTp::LongLong, None),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_int(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_bytes_to_int() {
    // Byte-string -> signed-integer-type conversion, including decimal
    // points and scientific notation; rounding matches the float path.
    // `None` marks expected overflow.
    let cases: Vec<(&[u8], FieldTypeTp, Option<i64>)> = vec![
        (b"123.1", FieldTypeTp::Tiny, Some(123)),
        (b"1.231e2", FieldTypeTp::Tiny, Some(123)),
        (b"1.235e2", FieldTypeTp::Tiny, Some(124)),
        (b"123.6", FieldTypeTp::Tiny, Some(124)),
        (b"-123.1", FieldTypeTp::Tiny, Some(-123)),
        (b"-123.6", FieldTypeTp::Tiny, Some(-124)),
        (b"256.5", FieldTypeTp::Tiny, None),
        (b"256.1", FieldTypeTp::Short, Some(256)),
        (b"256.6", FieldTypeTp::Short, Some(257)),
        (b"-256.1", FieldTypeTp::Short, Some(-256)),
        (b"-256.6", FieldTypeTp::Short, Some(-257)),
        (b"65535.5", FieldTypeTp::Short, None),
        (b"65536.1", FieldTypeTp::Int24, Some(65536)),
        (b"65536.5", FieldTypeTp::Int24, Some(65537)),
        (b"-65536.1", FieldTypeTp::Int24, Some(-65536)),
        (b"-65536.5", FieldTypeTp::Int24, Some(-65537)),
        (b"8388610.2", FieldTypeTp::Int24, None),
        (b"8388610.4", FieldTypeTp::Long, Some(8388610)),
        (b"8388610.5", FieldTypeTp::Long, Some(8388611)),
        (b"-8388610.4", FieldTypeTp::Long, Some(-8388610)),
        (b"-8388610.5", FieldTypeTp::Long, Some(-8388611)),
        (b"4294967296.8", FieldTypeTp::Long, None),
        (b"4294967296.8", FieldTypeTp::LongLong, Some(4294967297)),
        (b"4294967297.1", FieldTypeTp::LongLong, Some(4294967297)),
        (b"-4294967296.8", FieldTypeTp::LongLong, Some(-4294967297)),
        (b"-4294967297.1", FieldTypeTp::LongLong, Some(-4294967297)),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_int(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {:?}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_bytes_to_int_overflow() {
    // With OVERFLOW_AS_WARNING set, an absurdly large exponent saturates
    // to the target type's maximum instead of erroring.
    let cases: Vec<(&[u8], _, _)> = vec![
        (
            b"12e1234817291749271847289417294",
            FieldTypeTp::LongLong,
            9223372036854775807,
        ),
        (
            b"12e1234817291749271847289417294",
            FieldTypeTp::Long,
            2147483647,
        ),
        (b"12e1234817291749271847289417294", FieldTypeTp::Tiny, 127),
    ];
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::OVERFLOW_AS_WARNING)));
    for (input, tp, expected) in cases {
        let got = input.to_int(&mut ctx, tp).unwrap();
        assert_eq!(expected, got);
    }
}
#[test]
fn test_datatype_to_int_overflow() {
    // For every source datatype, verify that an out-of-range `to_int`:
    //  1. errors with ERR_DATA_OUT_OF_RANGE under the default context, and
    //  2. saturates to `dst` plus exactly one warning under OVERFLOW_AS_WARNING.
    fn test_overflow<T: Debug + Clone + ToInt>(raw: T, dst: i64, tp: FieldTypeTp) {
        // Default context: overflow must be a hard error.
        let mut ctx = EvalContext::default();
        let val = raw.to_int(&mut ctx, tp);
        match val {
            Err(e) => assert_eq!(
                e.code(),
                ERR_DATA_OUT_OF_RANGE,
                "expect code {}, but got: {}",
                ERR_DATA_OUT_OF_RANGE,
                e.code()
            ),
            res => panic!("expect convert {:?} to overflow, but got {:?}", raw, res),
        };
        // OVERFLOW_AS_WARNING: same input saturates and only warns.
        let mut ctx =
            EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::OVERFLOW_AS_WARNING)));
        let val = raw.to_int(&mut ctx, tp);
        assert_eq!(val.unwrap(), dst);
        assert_eq!(ctx.warnings.warning_cnt, 1);
    }
    // int_to_int: saturate to the target type's min/max.
    let cases: Vec<(i64, i64, FieldTypeTp)> = vec![
        (12345, 127, FieldTypeTp::Tiny),
        (-12345, -128, FieldTypeTp::Tiny),
        (123456, 32767, FieldTypeTp::Short),
        (-123456, -32768, FieldTypeTp::Short),
        (83886078, 8388607, FieldTypeTp::Int24),
        (-83886078, -8388608, FieldTypeTp::Int24),
        (i64::MAX, 2147483647, FieldTypeTp::Long),
        (i64::MIN, -2147483648, FieldTypeTp::Long),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // uint_to_int: unsigned sources can only overflow the positive bound.
    let cases: Vec<(u64, i64, FieldTypeTp)> = vec![
        (12345, 127, FieldTypeTp::Tiny),
        (123456, 32767, FieldTypeTp::Short),
        (83886078, 8388607, FieldTypeTp::Int24),
        (u64::MAX, 2147483647, FieldTypeTp::Long),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // float_to_int: note 127.5 rounds up past Tiny's max, so it overflows too.
    let cases: Vec<(f64, i64, FieldTypeTp)> = vec![
        (127.5, 127, FieldTypeTp::Tiny),
        (12345f64, 127, FieldTypeTp::Tiny),
        (-12345f64, -128, FieldTypeTp::Tiny),
        (32767.6, 32767, FieldTypeTp::Short),
        (123456f64, 32767, FieldTypeTp::Short),
        (-123456f64, -32768, FieldTypeTp::Short),
        (8388607.7, 8388607, FieldTypeTp::Int24),
        (83886078f64, 8388607, FieldTypeTp::Int24),
        (-83886078f64, -8388608, FieldTypeTp::Int24),
        (2147483647.8, 2147483647, FieldTypeTp::Long),
        (-2147483648.8, -2147483648, FieldTypeTp::Long),
        (f64::MAX, 2147483647, FieldTypeTp::Long),
        (f64::MIN, -2147483648, FieldTypeTp::Long),
        (f64::MAX, i64::MAX, FieldTypeTp::LongLong),
        (f64::MIN, i64::MIN, FieldTypeTp::LongLong),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // bytes_to_int: decimal strings behave like the float path above.
    let cases: Vec<(&[u8], i64, FieldTypeTp)> = vec![
        (b"127.5", 127, FieldTypeTp::Tiny),
        (b"128.5", 127, FieldTypeTp::Tiny),
        (b"12345", 127, FieldTypeTp::Tiny),
        (b"-12345", -128, FieldTypeTp::Tiny),
        (b"32768.6", 32767, FieldTypeTp::Short),
        (b"123456", 32767, FieldTypeTp::Short),
        (b"-123456", -32768, FieldTypeTp::Short),
        (b"8388608.7", 8388607, FieldTypeTp::Int24),
        (b"83886078", 8388607, FieldTypeTp::Int24),
        (b"-83886078", -8388608, FieldTypeTp::Int24),
        (b"2147483649.8", 2147483647, FieldTypeTp::Long),
        (b"-2147483649", -2147483648, FieldTypeTp::Long),
        (b"314748364221339834234239", i64::MAX, FieldTypeTp::LongLong),
        (
            b"-314748364221339834234239",
            i64::MIN,
            FieldTypeTp::LongLong,
        ),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
}
#[test]
fn test_bytes_to_int_truncated() {
    // Truncation behavior of bytes -> int under each flag regime:
    // default = error, IGNORE_TRUNCATE = silent success, TRUNCATE_AS_WARNING
    // = success plus warnings.
    // Default: trailing garbage is a hard ERR_TRUNCATE_WRONG_VALUE error.
    let mut ctx = EvalContext::default();
    let bs = b"123bb".to_vec();
    let val = bs.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert!(val.is_err());
    assert_eq!(val.unwrap_err().code(), ERR_TRUNCATE_WRONG_VALUE);
    // Invalid UTF8 chars: default context reports WARN_DATA_TRUNCATED as an error.
    let mut ctx = EvalContext::default();
    let invalid_utf8: Vec<u8> = vec![0, 159, 146, 150];
    let val = invalid_utf8.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert!(val.is_err());
    assert_eq!(val.unwrap_err().code(), WARN_DATA_TRUNCATED);
    // IGNORE_TRUNCATE: the numeric prefix is used and no warning is recorded.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::IGNORE_TRUNCATE)));
    let val = bs.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(val.unwrap(), 123i64);
    assert_eq!(ctx.warnings.warning_cnt, 0);
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::IGNORE_TRUNCATE)));
    let invalid_utf8 = vec![b'1', b'2', b'3', 0, 159, 146, 150];
    let val = invalid_utf8.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(val.unwrap(), 123i64);
    assert_eq!(ctx.warnings.warning_cnt, 0);
    // TRUNCATE_AS_WARNING: prefix is used; one warning for plain garbage...
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val = bs.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(val.unwrap(), 123i64);
    assert_eq!(ctx.warnings.warning_cnt, 1);
    // ...but two for invalid UTF-8, as explained below.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val = invalid_utf8.to_int(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(val.unwrap(), 123i64);
    // note:
    // warning 1: vec!['1' as u8, '2' as u8, '3' as u8, 0, 159, 146, 150] -> utf8
    // warning 2: vec!['1' as u8, '2' as u8, '3' as u8, 0] -> float
    assert_eq!(
        ctx.warnings.warning_cnt, 2,
        "unexpected warning: {:?}",
        ctx.warnings.warnings
    );
}
#[test]
fn test_bytes_to_int_without_context() {
    // Context-free parsing: leading spaces/tabs are skipped (but `\r` is
    // not), trailing garbage is ignored, an unparsable prefix yields 0,
    // and the exact i64 bounds round-trip.
    let cases: Vec<(&'static [u8], i64)> = vec![
        (b"0", 0),
        (b" 23a", 23),
        (b"\t 23a", 23),
        (b"\r23a", 0),
        (b"1", 1),
        (b"2.1", 2),
        (b"23e10", 23),
        (b"ab", 0),
        (b"4a", 4),
        (b"+1024", 1024),
        (b"-231", -231),
        (b"", 0),
        (b"9223372036854775807", i64::MAX),
        (b"-9223372036854775808", i64::MIN),
    ];
    for (input, expected) in cases {
        let got = super::bytes_to_int_without_context(input).unwrap();
        if got != expected {
            panic!("expect convert {:?} to {}, but got {}", input, expected, got);
        }
    }
    // Values just outside the i64 range must report overflow.
    let overflow_cases: Vec<&'static [u8]> =
        vec![b"9223372036854775809", b"-9223372036854775810"];
    for input in overflow_cases {
        match super::bytes_to_int_without_context(input) {
            Err(e) => assert!(e.is_overflow()),
            res => panic!("expect convert {:?} to overflow, but got {:?}", input, res),
        };
    }
}
#[test]
fn test_json_to_int() {
    // JSON -> i64: containers/null/non-numeric strings become 0, booleans
    // become 0/1, 4.5 rounds to 5, and numeric strings are parsed.
    let cases = vec![
        ("{}", 0),
        ("[]", 0),
        ("3", 3),
        ("-3", -3),
        ("4.1", 4),
        ("4.5", 5),
        ("true", 1),
        ("false", 0),
        ("null", 0),
        (r#""hello""#, 0),
        (r#""1234""#, 1234),
    ];
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
    for (json_str, expected) in cases {
        let json: Json = json_str.parse().unwrap();
        let got = json.to_int(&mut ctx, FieldTypeTp::LongLong).unwrap();
        assert_eq!(got, expected, "json.as_i64 get: {}, exp: {}", got, expected);
    }
}
#[test]
fn test_cast_err_when_json_array_or_object_to_int() {
    // Without IGNORE_TRUNCATE, casting a JSON object/array to int must
    // fail with ERR_TRUNCATE_WRONG_VALUE rather than silently yield 0.
    let cases = vec![
        ("{}", ERR_TRUNCATE_WRONG_VALUE),
        ("[]", ERR_TRUNCATE_WRONG_VALUE),
    ];
    // avoid to use EvalConfig::default_for_test() that set Flag::IGNORE_TRUNCATE as true
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::new()));
    for (json_str, expected_code) in cases {
        let json: Json = json_str.parse().unwrap();
        let result: Result<i64> = json.to_int(&mut ctx, FieldTypeTp::LongLong);
        let err = result.unwrap_err();
        assert_eq!(
            err.code(),
            expected_code,
            "json.as_f64 get: {}, exp: {}",
            err.code(),
            expected_code
        );
    }
}
#[test]
fn test_int_to_uint() {
    // i64 -> unsigned conversion; `None` marks expected overflow.
    // Note that -1 to LongLong reinterprets as u64::MAX.
    let cases: Vec<(i64, FieldTypeTp, Option<u64>)> = vec![
        (123, FieldTypeTp::Tiny, Some(123)),
        (256, FieldTypeTp::Tiny, None),
        (123, FieldTypeTp::Short, Some(123)),
        (65536, FieldTypeTp::Short, None),
        (123, FieldTypeTp::Int24, Some(123)),
        (16777216, FieldTypeTp::Int24, None),
        (16777216, FieldTypeTp::Long, Some(16777216)),
        (4294967297, FieldTypeTp::Long, None),
        (8388610, FieldTypeTp::LongLong, Some(8388610)),
        (-1, FieldTypeTp::LongLong, Some(u64::MAX)),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_uint(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
    // SHOULD_CLIP_TO_ZERO: in an insert statement a negative value errors...
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::IN_INSERT_STMT)));
    let res = (-12345i64).to_uint(&mut ctx, FieldTypeTp::LongLong);
    assert!(res.is_err());
    // SHOULD_CLIP_TO_ZERO | OVERFLOW_AS_WARNING: ...and clips to 0 when
    // overflow is demoted to a warning.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(
        Flag::IN_INSERT_STMT | Flag::OVERFLOW_AS_WARNING,
    )));
    let clipped = (-12345i64)
        .to_uint(&mut ctx, FieldTypeTp::LongLong)
        .unwrap();
    assert_eq!(clipped, 0);
}
#[test]
fn test_uint_into_uint() {
    // u64 -> unsigned-type narrowing; `None` marks expected overflow,
    // and u64::MAX fits LongLong unchanged.
    let cases: Vec<(u64, FieldTypeTp, Option<u64>)> = vec![
        (123, FieldTypeTp::Tiny, Some(123)),
        (256, FieldTypeTp::Tiny, None),
        (123, FieldTypeTp::Short, Some(123)),
        (65536, FieldTypeTp::Short, None),
        (123, FieldTypeTp::Int24, Some(123)),
        (16777216, FieldTypeTp::Int24, None),
        (8388610, FieldTypeTp::Long, Some(8388610)),
        (4294967297, FieldTypeTp::Long, None),
        (4294967297, FieldTypeTp::LongLong, Some(4294967297)),
        (u64::MAX, FieldTypeTp::LongLong, Some(u64::MAX)),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_uint(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_float_to_uint() {
    // f64 -> unsigned conversion: fractions round half away from zero and
    // any negative value overflows. `None` marks expected overflow.
    let cases: Vec<(f64, FieldTypeTp, Option<u64>)> = vec![
        (123.1, FieldTypeTp::Tiny, Some(123)),
        (123.6, FieldTypeTp::Tiny, Some(124)),
        (256.5, FieldTypeTp::Tiny, None),
        (256.1, FieldTypeTp::Short, Some(256)),
        (256.6, FieldTypeTp::Short, Some(257)),
        (65535.5, FieldTypeTp::Short, None),
        (65536.1, FieldTypeTp::Int24, Some(65536)),
        (65536.5, FieldTypeTp::Int24, Some(65537)),
        (16777215.4, FieldTypeTp::Int24, Some(16777215)),
        (16777216.1, FieldTypeTp::Int24, None),
        (8388610.4, FieldTypeTp::Long, Some(8388610)),
        (8388610.5, FieldTypeTp::Long, Some(8388611)),
        (4294967296.8, FieldTypeTp::Long, None),
        (4294967296.8, FieldTypeTp::LongLong, Some(4294967297)),
        (4294967297.1, FieldTypeTp::LongLong, Some(4294967297)),
        (-4294967297.1, FieldTypeTp::LongLong, None),
        (f64::MAX, FieldTypeTp::LongLong, None),
        (f64::MIN, FieldTypeTp::LongLong, None),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_uint(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_bytes_to_uint() {
    // Byte-string -> unsigned conversion, mirroring the float path for
    // rounding and scientific notation. `None` marks expected overflow.
    let cases: Vec<(&[u8], FieldTypeTp, Option<u64>)> = vec![
        (b"123.1", FieldTypeTp::Tiny, Some(123)),
        (b"1.231e2", FieldTypeTp::Tiny, Some(123)),
        (b"1.235e2", FieldTypeTp::Tiny, Some(124)),
        (b"123.6", FieldTypeTp::Tiny, Some(124)),
        (b"256.5", FieldTypeTp::Tiny, None),
        (b"256.1", FieldTypeTp::Short, Some(256)),
        (b"256.6", FieldTypeTp::Short, Some(257)),
        (b"65535.5", FieldTypeTp::Short, None),
        (b"65536.1", FieldTypeTp::Int24, Some(65536)),
        (b"65536.5", FieldTypeTp::Int24, Some(65537)),
        (b"18388610.2", FieldTypeTp::Int24, None),
        (b"8388610.4", FieldTypeTp::Long, Some(8388610)),
        (b"8388610.5", FieldTypeTp::Long, Some(8388611)),
        (b"4294967296.8", FieldTypeTp::Long, None),
        (b"4294967296.8", FieldTypeTp::LongLong, Some(4294967297)),
        (b"4294967297.1", FieldTypeTp::LongLong, Some(4294967297)),
    ];
    let mut ctx = EvalContext::default();
    for (input, tp, expected) in cases {
        let res = input.to_uint(&mut ctx, tp);
        if let Some(expected) = expected {
            assert_eq!(expected, res.unwrap());
        } else {
            assert!(
                res.is_err(),
                "from: {:?}, to tp: {} should be overflow",
                input,
                tp
            );
        }
    }
}
#[test]
fn test_bytes_to_uint_without_context() {
    // Context-free unsigned parsing: spaces/tabs are skipped (`\r` is not),
    // trailing garbage is ignored, and u64::MAX round-trips exactly.
    let cases: Vec<(&'static [u8], u64)> = vec![
        (b"0", 0),
        (b" 23a", 23),
        (b"\t 23a", 23),
        (b"\r23a", 0),
        (b"1", 1),
        (b"2.1", 2),
        (b"23e10", 23),
        (b"ab", 0),
        (b"4a", 4),
        (b"+1024", 1024),
        (b"231", 231),
        (b"18446744073709551615", u64::MAX),
    ];
    for (input, expected) in cases {
        let got = super::bytes_to_uint_without_context(input).unwrap();
        if got != expected {
            panic!("expect convert {:?} to {}, but got {}", input, expected, got);
        }
    }
    // One past u64::MAX must report overflow.
    let overflow_cases: Vec<&'static [u8]> = vec![b"18446744073709551616"];
    for input in overflow_cases {
        match super::bytes_to_uint_without_context(input) {
            Err(e) => assert!(e.is_overflow()),
            res => panic!("expect convert {:?} to overflow, but got {:?}", input, res),
        };
    }
}
#[test]
fn test_datatype_to_uint_overflow() {
    // For every source datatype, verify that an out-of-range `to_uint`:
    //  1. errors with ERR_DATA_OUT_OF_RANGE under the default context, and
    //  2. saturates to `dst` plus exactly one warning under OVERFLOW_AS_WARNING.
    fn test_overflow<T: Debug + Clone + ToInt>(raw: T, dst: u64, tp: FieldTypeTp) {
        // Default context: overflow must be a hard error.
        let mut ctx = EvalContext::default();
        let val = raw.to_uint(&mut ctx, tp);
        match val {
            Err(e) => assert_eq!(
                e.code(),
                ERR_DATA_OUT_OF_RANGE,
                "expect code {}, but got: {}",
                ERR_DATA_OUT_OF_RANGE,
                e.code()
            ),
            res => panic!("expect convert {:?} to overflow, but got {:?}", raw, res),
        };
        // OVERFLOW_AS_WARNING: same input saturates and only warns.
        let mut ctx =
            EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::OVERFLOW_AS_WARNING)));
        let val = raw.to_uint(&mut ctx, tp);
        assert_eq!(val.unwrap(), dst, "{:?} => {}", raw, dst);
        assert_eq!(ctx.warnings.warning_cnt, 1);
    }
    // int_to_uint: negative inputs saturate to the unsigned max of the type.
    let cases: Vec<(i64, u64, FieldTypeTp)> = vec![
        (12345, 255, FieldTypeTp::Tiny),
        (-1, 255, FieldTypeTp::Tiny),
        (123456, 65535, FieldTypeTp::Short),
        (-1, 65535, FieldTypeTp::Short),
        (16777216, 16777215, FieldTypeTp::Int24),
        (i64::MAX, 4294967295, FieldTypeTp::Long),
        (i64::MIN, u64::from(u32::MAX), FieldTypeTp::Long),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // uint_to_uint: too-large values saturate to the narrower type's max.
    let cases: Vec<(u64, u64, FieldTypeTp)> = vec![
        (12345, 255, FieldTypeTp::Tiny),
        (123456, 65535, FieldTypeTp::Short),
        (16777216, 16777215, FieldTypeTp::Int24),
        (u64::MAX, 4294967295, FieldTypeTp::Long),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // float_to_uint: note 255.5 rounds past Tiny's max, so it overflows.
    let cases: Vec<(f64, u64, FieldTypeTp)> = vec![
        (255.5, 255, FieldTypeTp::Tiny),
        (12345f64, 255, FieldTypeTp::Tiny),
        (65535.6, 65535, FieldTypeTp::Short),
        (123456f64, 65535, FieldTypeTp::Short),
        (16777215.7, 16777215, FieldTypeTp::Int24),
        (83886078f64, 16777215, FieldTypeTp::Int24),
        (4294967296.8, 4294967295, FieldTypeTp::Long),
        (f64::MAX, 4294967295, FieldTypeTp::Long),
        (f64::MAX, u64::MAX, FieldTypeTp::LongLong),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
    // bytes_to_uint: decimal strings behave like the float path above.
    let cases: Vec<(&[u8], u64, FieldTypeTp)> = vec![
        (b"255.5", 255, FieldTypeTp::Tiny),
        (b"12345", 255, FieldTypeTp::Tiny),
        (b"65535.6", 65535, FieldTypeTp::Short),
        (b"123456", 65535, FieldTypeTp::Short),
        (b"16777215.7", 16777215, FieldTypeTp::Int24),
        (b"183886078", 16777215, FieldTypeTp::Int24),
        (b"4294967295.5", 4294967295, FieldTypeTp::Long),
        (b"314748364221339834234239", u64::MAX, FieldTypeTp::LongLong),
    ];
    for (raw, dst, tp) in cases {
        test_overflow(raw, dst, tp);
    }
}
#[test]
fn test_bytes_to_uint_truncated() {
    // Trailing garbage in bytes -> uint: hard error by default, silent
    // success under IGNORE_TRUNCATE, success + warning under
    // TRUNCATE_AS_WARNING.
    let mut ctx = EvalContext::default();
    let input = b"123bb".to_vec();
    let res = input.to_uint(&mut ctx, FieldTypeTp::LongLong);
    match res {
        Err(e) => assert_eq!(
            e.code(),
            ERR_TRUNCATE_WRONG_VALUE,
            "expect data truncated, but got {:?}",
            e
        ),
        other => panic!("expect convert {:?} to truncated, but got {:?}", input, other),
    };
    // IGNORE_TRUNCATE
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::IGNORE_TRUNCATE)));
    let res = input.to_uint(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(res.unwrap(), 123);
    // TRUNCATE_AS_WARNING
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let res = input.to_uint(&mut ctx, FieldTypeTp::LongLong);
    assert_eq!(res.unwrap(), 123);
    assert_eq!(ctx.warnings.warnings.len(), 1);
}
#[test]
fn test_json_to_uint() {
    // JSON -> u64: containers/null/non-numeric strings become 0, booleans
    // become 0/1, 4.5 rounds to 5, and numeric strings are parsed.
    let cases = vec![
        ("{}", 0u64),
        ("[]", 0u64),
        ("3", 3u64),
        ("4.1", 4u64),
        ("4.5", 5u64),
        ("true", 1u64),
        ("false", 0u64),
        ("null", 0u64),
        (r#""hello""#, 0u64),
        (r#""1234""#, 1234u64),
    ];
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
    for (json_str, expected) in cases {
        let json: Json = json_str.parse().unwrap();
        let got = json.to_uint(&mut ctx, FieldTypeTp::LongLong).unwrap();
        assert_eq!(got, expected, "json.as_u64 get: {}, exp: {}", got, expected);
    }
}
#[test]
fn test_cast_err_when_json_array_or_object_to_uint() {
    // Without IGNORE_TRUNCATE, casting a JSON object/array to uint must
    // fail with ERR_TRUNCATE_WRONG_VALUE rather than silently yield 0.
    let cases = vec![
        ("{}", ERR_TRUNCATE_WRONG_VALUE),
        ("[]", ERR_TRUNCATE_WRONG_VALUE),
    ];
    // avoid to use EvalConfig::default_for_test() that set Flag::IGNORE_TRUNCATE as true
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::new()));
    for (json_str, expected_code) in cases {
        let json: Json = json_str.parse().unwrap();
        let result: Result<u64> = json.to_uint(&mut ctx, FieldTypeTp::LongLong);
        let err = result.unwrap_err();
        assert_eq!(
            err.code(),
            expected_code,
            "json.as_f64 get: {}, exp: {}",
            err.code(),
            expected_code
        );
    }
}
#[test]
fn test_bytes_to_f64() {
    // Byte-string -> f64 conversion: strict parsing by default, then
    // overflow and flag-dependent truncation behavior.
    // Default context: malformed input (empty, double dot, hex, letters)
    // is a hard error; `None` in the table marks those cases.
    let tests: Vec<(&'static [u8], Option<f64>)> = vec![
        (b"", None),
        (b" 23", Some(23.0)),
        (b"-1", Some(-1.0)),
        (b"1.11", Some(1.11)),
        (b"1.11.00", None),
        (b"xx", None),
        (b"0x00", None),
        (b"11.xx", None),
        (b"xx.11", None),
    ];
    let mut ctx = EvalContext::default();
    for (i, (v, expect)) in tests.iter().enumerate() {
        let ff: Result<f64> = v.convert(&mut ctx);
        match expect {
            Some(val) => {
                assert_eq!(ff.unwrap(), *val);
            }
            None => {
                assert!(
                    ff.is_err(),
                    "index: {}, {:?} should not be converted, but got: {:?}",
                    i,
                    v,
                    ff
                );
            }
        }
    }
    // test overflow: +/- infinity strings are rejected under the default context.
    let mut ctx = EvalContext::default();
    let val: Result<f64> = f64::INFINITY.to_string().as_bytes().convert(&mut ctx);
    assert!(val.is_err());
    let mut ctx = EvalContext::default();
    let val: Result<f64> = f64::NEG_INFINITY.to_string().as_bytes().convert(&mut ctx);
    assert!(val.is_err());
    // TRUNCATE_AS_WARNING: a 309-nine string overflows f64 and saturates
    // to f64::MAX with one truncation warning.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val: f64 = (0..309)
        .map(|_| '9')
        .collect::<String>()
        .as_bytes()
        .convert(&mut ctx)
        .unwrap();
    assert_eq!(val, f64::MAX);
    assert_eq!(ctx.warnings.warning_cnt, 1);
    assert_eq!(
        ctx.warnings.warnings[0].get_code(),
        ERR_TRUNCATE_WRONG_VALUE
    );
    // Same with a leading '-': saturates to f64::MIN.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val: f64 = (0..310)
        .map(|i| if i == 0 { '-' } else { '9' })
        .collect::<String>()
        .as_bytes()
        .convert(&mut ctx)
        .unwrap();
    assert_eq!(val, f64::MIN);
    assert_eq!(ctx.warnings.warning_cnt, 1);
    assert_eq!(
        ctx.warnings.warnings[0].get_code(),
        ERR_TRUNCATE_WRONG_VALUE
    );
    // TRUNCATE_AS_WARNING: empty input and trailing garbage succeed with a warning.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val: Result<f64> = b"".to_vec().convert(&mut ctx);
    assert!(val.is_ok());
    assert_eq!(val.unwrap(), 0.0);
    assert_eq!(ctx.warnings.warnings.len(), 1);
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING)));
    let val: Result<f64> = b"1.1a".to_vec().convert(&mut ctx);
    assert!(val.is_ok());
    assert_eq!(val.unwrap(), 1.1);
    assert_eq!(ctx.warnings.warnings.len(), 1);
    // IGNORE_TRUNCATE: trailing garbage succeeds with no warning at all.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(Flag::IGNORE_TRUNCATE)));
    let val: Result<f64> = b"1.2a".to_vec().convert(&mut ctx);
    assert!(val.is_ok());
    assert_eq!(val.unwrap(), 1.2);
    assert_eq!(ctx.warnings.warnings.len(), 0);
}
#[test]
fn test_get_valid_float_prefix() {
    // The longest leading substring that still parses as a float is kept:
    // empty input falls back to "0", a dangling exponent or second sign
    // is dropped, and a fractional exponent is cut at the dot.
    let cases = vec![
        ("-100", "-100"),
        ("1abc", "1"),
        ("-1-1", "-1"),
        ("+1+1", "+1"),
        ("123..34", "123."),
        ("123.23E-10", "123.23E-10"),
        ("1.1e1.3", "1.1e1"),
        ("11e1.3", "11e1"),
        ("1.1e-13a", "1.1e-13"),
        ("1.", "1."),
        (".1", ".1"),
        ("", "0"),
        ("123e+", "123"),
        ("123.e", "123."),
        ("1-1-", "1"),
        ("11-1-", "11"),
        ("-1-1-", "-1"),
    ];
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
    for (input, expected) in cases {
        assert_eq!(
            super::get_valid_float_prefix(&mut ctx, input).unwrap(),
            expected
        );
    }
}
#[test]
fn test_round_int_str() {
    // Rounds an integer string by its first dropped digit: < '5' keeps the
    // string, >= '5' increments it, carrying through trailing nines and
    // preserving any leading sign.
    let cases = vec![
        ("123", '1', "123"),
        ("123", '4', "123"),
        ("123", '5', "124"),
        ("123", '6', "124"),
        ("999", '6', "1000"),
        ("998", '6', "999"),
        ("989", '6', "990"),
        ("989898979", '6', "989898980"),
        ("989898999", '6', "989899000"),
        ("+989898999", '6', "+989899000"),
        ("-989898999", '6', "-989899000"),
    ];
    for (digits, next_digit, expected) in cases {
        let rounded = round_int_str(next_digit, digits);
        assert_eq!(
            rounded, expected,
            "round int str: {}, {}, expect: {}, got: {}",
            digits, next_digit, expected, rounded
        )
    }
}
#[test]
fn test_invalid_get_valid_int_prefix() {
    // Floats too large for i64 are returned verbatim (no error), but each
    // conversion attaches an ERR_DATA_OUT_OF_RANGE warning to the context.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
    let cases = vec!["1e21", "1e9223372036854775807"];
    // Firstly, make sure no error returns, instead a valid float string is returned
    for input in cases {
        let res = super::get_valid_int_prefix(&mut ctx, input);
        assert_eq!(res.unwrap(), input);
    }
    // Secondly, make sure warnings are attached when the float string cannot be casted to a valid int string
    let warnings = ctx.take_warnings().warnings;
    assert_eq!(warnings.len(), 2);
    for warning in warnings {
        assert_eq!(warning.get_code(), ERR_DATA_OUT_OF_RANGE);
    }
}
#[test]
fn test_valid_get_valid_int_prefix() {
    // get_valid_int_prefix: rounds a float string to an integer string.
    // First set: default test config — exponents are expanded and the
    // fractional part is rounded half-up; signs are preserved.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::default_for_test()));
    let cases = vec![
        ("+0.0", "0"),
        ("+000.0", "000"),
        ("-0.0", "0"),
        ("-000.0", "-000"),
        (".1", "0"),
        (".0", "0"),
        (".5", "1"),
        ("+.5", "1"),
        ("-.5", "-1"),
        (".5e0", "1"),
        ("+.5e0", "+1"),
        ("-.5e0", "-1"),
        ("6.01e-1", "1"),
        ("123", "123"),
        ("255.5", "256"),
        ("123e1", "1230"),
        ("123.1e2", "12310"),
        ("1.231e2", "123"),
        ("1.236e2", "124"),
        ("123.45e5", "12345000"),
        ("123.55e5", "12355000"),
        ("123.45678e5", "12345678"),
        ("123.456789e5", "12345679"),
        ("123.456784e5", "12345678"),
        ("123.456999e5", "12345700"),
        ("-123.45678e5", "-12345678"),
        ("+123.45678e5", "+12345678"),
        ("9e20", "900000000000000000000"),
    ];
    for (i, e) in cases {
        let o = super::get_valid_int_prefix(&mut ctx, i);
        assert_eq!(o.unwrap(), *e, "{}, {}", i, e);
    }
    // No warnings expected for any of the cases above.
    assert_eq!(ctx.take_warnings().warnings.len(), 0);
    // Second set: inside a SELECT with IGNORE_TRUNCATE | OVERFLOW_AS_WARNING,
    // only the integer part before 'e' is kept (e.g. "9e20" -> "9"),
    // still without emitting warnings.
    let mut ctx = EvalContext::new(Arc::new(EvalConfig::from_flag(
        Flag::IN_SELECT_STMT | Flag::IGNORE_TRUNCATE | Flag::OVERFLOW_AS_WARNING,
    )));
    let cases = vec![
        ("+0.0", "+0"),
        ("100", "100"),
        ("+100", "+100"),
        ("-100", "-100"),
        ("9e20", "9"),
        ("+9e20", "+9"),
        ("-9e20", "-9"),
        ("-900e20", "-900"),
    ];
    for (i, e) in cases {
        let o = super::get_valid_int_prefix(&mut ctx, i);
        assert_eq!(o.unwrap(), *e, "{}, {}", i, e);
    }
    assert_eq!(ctx.take_warnings().warnings.len(), 0);
}
#[test]
fn test_truncate_binary() {
    // UNSPECIFIED_LENGTH and isize::MAX leave the buffer untouched,
    // while a flen of 0 empties it.
    let original = b"123456789".to_vec();
    let mut unspecified = original.clone();
    truncate_binary(&mut unspecified, crate::def::UNSPECIFIED_LENGTH);
    assert_eq!(unspecified, original);
    let mut huge_flen = original.clone();
    truncate_binary(&mut huge_flen, isize::MAX);
    assert_eq!(huge_flen, original);
    let mut zero_flen = original;
    truncate_binary(&mut zero_flen, 0);
    assert!(zero_flen.is_empty());
    // TODO port tests from tidb(tidb haven't implemented now)
}
#[test]
fn test_truncate_f64() {
    // truncate_f64(value, flen, decimal): rounds to `decimal` places and
    // reports Res::Overflow (clamped value) when the result needs more
    // than `flen` digits; NaN overflows to 0.
    let cases = vec![
        (100.114, 10, 2, Res::Ok(100.11)),
        (100.115, 10, 2, Res::Ok(100.12)),
        (100.1156, 10, 3, Res::Ok(100.116)),
        (100.1156, 3, 1, Res::Overflow(99.9)),
        (1.36, 10, 2, Res::Ok(1.36)),
        (f64::NAN, 10, 1, Res::Overflow(0f64)),
    ];
    for (value, flen, decimal, expected) in cases {
        let got = truncate_f64(value, flen, decimal);
        assert_eq!(got, expected);
    }
}
#[test]
fn test_produce_str_with_specified_tp() {
    // produce_str_with_specified_tp truncates a string to the field's flen:
    // by characters for UTF-8 charsets, by bytes for ASCII, and pads binary
    // strings out to flen (branch 3).
    let cases = vec![
        // branch 1: UTF-8 charset — flen counts characters.
        ("世界,中国", 1, charset::CHARSET_UTF8),
        ("世界,中国", 2, charset::CHARSET_UTF8),
        ("世界,中国", 3, charset::CHARSET_UTF8),
        ("世界,中国", 4, charset::CHARSET_UTF8),
        ("世界,中国", 5, charset::CHARSET_UTF8),
        ("世界,中国", 6, charset::CHARSET_UTF8),
        // branch 2: ASCII charset — flen counts bytes.
        ("世界,中国", 1, charset::CHARSET_ASCII),
        ("世界,中国", 2, charset::CHARSET_ASCII),
        ("世界,中国", 3, charset::CHARSET_ASCII),
        ("世界,中国", 4, charset::CHARSET_ASCII),
        ("世界,中国", 5, charset::CHARSET_ASCII),
        ("世界,中国", 6, charset::CHARSET_ASCII),
    ];
    // TRUNCATE_AS_WARNING so oversize input truncates instead of erroring.
    let cfg = EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING);
    let mut ctx = EvalContext::new(Arc::new(cfg));
    let mut ft = FieldType::default();
    for (s, char_num, cs) in cases {
        ft.set_charset(cs.to_string());
        ft.set_flen(char_num);
        let bs = s.as_bytes();
        let r = produce_str_with_specified_tp(&mut ctx, Cow::Borrowed(bs), &ft, false);
        assert!(r.is_ok(), "{}, {}, {}", s, char_num, cs);
        let p = r.unwrap();
        if cs == charset::CHARSET_UTF8MB4 || cs == charset::CHARSET_UTF8 {
            // UTF-8: compare against the first `char_num` characters.
            let ns: String = s.chars().take(char_num as usize).collect();
            assert_eq!(p.as_ref(), ns.as_bytes(), "{}, {}, {}", s, char_num, cs);
        } else {
            // Non-UTF-8: compare against the first `char_num` bytes.
            assert_eq!(
                p.as_ref(),
                &bs[..(char_num as usize)],
                "{}, {}, {}",
                s,
                char_num,
                cs
            );
        }
    }
    let cases = vec![
        // branch 3: binary-collated String field with flen larger than the
        // input — output is padded up to flen bytes.
        ("世界,中国", 20, charset::CHARSET_ASCII),
        ("世界,中国", 30, charset::CHARSET_ASCII),
        ("世界,中国", 50, charset::CHARSET_ASCII),
    ];
    use crate::FieldTypeAccessor;
    let cfg = EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING);
    let mut ctx = EvalContext::new(Arc::new(cfg));
    let mut ft = FieldType::default();
    let fta = ft.as_mut_accessor();
    fta.set_tp(FieldTypeTp::String);
    fta.set_collation(Collation::Binary);
    for (s, char_num, cs) in cases {
        ft.set_charset(cs.to_string());
        ft.set_flen(char_num);
        let bs = s.as_bytes();
        // `pad_char_to_full_length = true` triggers the padding branch.
        let r = produce_str_with_specified_tp(&mut ctx, Cow::Borrowed(bs), &ft, true);
        assert!(r.is_ok(), "{}, {}, {}", s, char_num, cs);
        let p = r.unwrap();
        assert_eq!(p.len(), char_num as usize, "{}, {}, {}", s, char_num, cs);
    }
}
#[test]
fn test_produce_dec_with_specified_tp() {
    // produce_dec_with_specified_tp clamps a decimal to the field's
    // (flen, decimal): overflow saturates to max_or_min_dec (branch 1),
    // otherwise the value is rounded to `decimal` places (branch 2).
    use std::str::FromStr;
    let cases = vec![
        // branch 1: integer part exceeds flen - decimal -> saturate.
        (
            Decimal::from_str("11.1").unwrap(),
            2,
            2,
            max_or_min_dec(false, 2u8, 2u8),
        ),
        (
            Decimal::from_str("-111.1").unwrap(),
            2,
            2,
            max_or_min_dec(true, 2u8, 2u8),
        ),
        // branch 2: value fits -> keep or round the fraction.
        (
            Decimal::from_str("-1111.1").unwrap(),
            5,
            1,
            Decimal::from_str("-1111.1").unwrap(),
        ),
        (
            Decimal::from_str("-111.111").unwrap(),
            5,
            2,
            Decimal::from_str("-111.11").unwrap(),
        ),
    ];
    // Both flags as warnings so clamping/rounding succeeds instead of erroring.
    let cfg = EvalConfig::from_flag(Flag::TRUNCATE_AS_WARNING | Flag::OVERFLOW_AS_WARNING);
    let mut ctx = EvalContext::new(Arc::new(cfg));
    let mut ft = FieldType::default();
    for (dec, flen, decimal, want) in cases {
        ft.set_flen(flen);
        ft.set_decimal(decimal);
        let nd = produce_dec_with_specified_tp(&mut ctx, dec, &ft);
        assert!(nd.is_ok());
        let nd = nd.unwrap();
        assert_eq!(nd, want, "{}, {}, {}, {}, {}", dec, nd, want, flen, decimal);
    }
}
#[test]
fn test_produce_dec_with_specified_tp_2() {
let ul = isize::from(UNSPECIFIED_FSP);
let cs = vec![
// (
// origin,
// (origin_flen, origin_decimal), (res_flen, res_decimal), is_unsigned,
// expect, warning_err_code,
// ((InInsertStmt || InUpdateStmt || InDeleteStmt), overflow_as_warning, truncate_as_warning)
// )
//
// The origin_flen, origin_decimal field is to
// let the programmer clearly know what the flen and decimal of the decimal is.
// res_flen and res_decimal isn't UNSPECIFIED_FSP
// flen < decimal
(
Decimal::zero(),
(1, 0),
(1, 2),
false,
Err(Error::m_bigger_than_d("")),
None,
(false, false, false),
),
(
Decimal::from(0),
(1, 0),
(1, 2),
false,
Err(Error::m_bigger_than_d("")),
None,
(false, false, false),
),
// origin not zero, but res's int part len < origin's int part
(
Decimal::from(1024),
(4, 0),
(3, 0),
false,
Ok(Decimal::from(999)),
Some(ERR_DATA_OUT_OF_RANGE),
(false, true, false),
),
(
Decimal::from(-1024),
(4, 0),
(3, 0),
false,
Ok(Decimal::from(-999)),
Some(ERR_DATA_OUT_OF_RANGE),
(false, true, false),
),
(
Decimal::from_f64(10240.01).unwrap(),
(7, 2),
(5, 1),
false,
Ok(Decimal::from_f64(9999.9).unwrap()),
Some(ERR_DATA_OUT_OF_RANGE),
(false, true, false),
),
(
Decimal::from_f64(-10240.01).unwrap(),
(7, 2),
(5, 1),
false,
Ok(Decimal::from_f64(-9999.9).unwrap()),
Some(ERR_DATA_OUT_OF_RANGE),
(false, true, false),
),
// origin_decimal < res_decimal
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(7, 5),
false,
Ok(Decimal::from_f64(10.12340).unwrap()),
None,
(false, false, false),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(7, 5),
false,
Ok(Decimal::from_f64(10.12340).unwrap()),
None,
(true, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(7, 5),
false,
Ok(Decimal::from_f64(-10.12340).unwrap()),
None,
(false, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(7, 5),
false,
Ok(Decimal::from_f64(-10.12340).unwrap()),
None,
(true, false, false),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(7, 5),
true,
Ok(Decimal::from_f64(10.12340).unwrap()),
None,
(false, false, false),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(7, 5),
true,
Ok(Decimal::from_f64(10.12340).unwrap()),
None,
(true, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(7, 5),
true,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(7, 5),
true,
Ok(Decimal::zero()),
None,
(true, false, false),
),
// origin_decimal > res_decimal
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(5, 3),
false,
Ok(Decimal::from_f64(10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(false, false, true),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(5, 3),
false,
Ok(Decimal::from_f64(10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(true, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(5, 3),
false,
Ok(Decimal::from_f64(-10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(false, false, true),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(5, 3),
false,
Ok(Decimal::from_f64(-10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(true, false, false),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(5, 3),
true,
Ok(Decimal::from_f64(10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(false, false, true),
),
(
Decimal::from_f64(10.1234).unwrap(),
(6, 4),
(5, 3),
true,
Ok(Decimal::from_f64(10.123).unwrap()),
Some(WARN_DATA_TRUNCATED),
(true, false, false),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(5, 3),
true,
Ok(Decimal::zero()),
Some(WARN_DATA_TRUNCATED),
(false, false, true),
),
(
Decimal::from_f64(-10.1234).unwrap(),
(6, 4),
(5, 3),
true,
Ok(Decimal::zero()),
Some(WARN_DATA_TRUNCATED),
(true, false, false),
),
// if after round, the dec is zero, then there is no err or warning
(
Decimal::from_f64(0.00001).unwrap(),
(5, 5),
(4, 4),
false,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_f64(0.00001).unwrap(),
(5, 5),
(4, 4),
false,
Ok(Decimal::zero()),
None,
(true, false, false),
),
(
Decimal::from_f64(-0.00001).unwrap(),
(5, 5),
(4, 4),
false,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_f64(-0.00001).unwrap(),
(5, 5),
(4, 4),
false,
Ok(Decimal::zero()),
None,
(true, false, false),
),
(
Decimal::from_f64(0.00001).unwrap(),
(5, 5),
(4, 4),
true,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_f64(0.00001).unwrap(),
(5, 5),
(4, 4),
true,
Ok(Decimal::zero()),
None,
(true, false, false),
),
(
Decimal::from_f64(-0.00001).unwrap(),
(5, 5),
(4, 4),
true,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_f64(-0.00001).unwrap(),
(5, 5),
(4, 4),
true,
Ok(Decimal::zero()),
None,
(true, false, false),
),
// TODO: add test case for Decimal::round failure
// zero
// FIXME:
// according to Decimal::prec_and_frac,
// the decimals' prec(the number of all digits) and frac(the number of digit after number point) are
// Decimal::zero()'s is (1, 0)
// Decimal::from_bytes(b"00.00")'s is (2, 2)
// Decimal::from_bytes(b"000.00")'s is (2, 2)
// Decimal::from_bytes(b"000.00")'s is (2, 2)
// Decimal::from_bytes(b"00.000")'s is (3, 3)
// Decimal::from_bytes(b"00.0000")'s is (4, 4)
// Decimal::from_bytes(b"00.00000")'s is (5, 5)
// This may be a bug.
// However, the case below are based on these expect.
(
Decimal::from_bytes(b"0.00").unwrap().unwrap(),
(2, 2),
(ul, ul),
false,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::zero(),
(1, 0),
(0, 0),
false,
Ok(Decimal::zero()),
None,
(false, false, false),
),
(
Decimal::from_bytes(b"0.0000").unwrap().unwrap(),
(4, 4),
(4, 1),
false,
Ok(Decimal::zero()),
None,
(false, false, false),
),
];
for (
input,
(origin_flen, origin_decimal),
(res_flen, res_decimal),
is_unsigned,
expect,
warning_err_code,
(in_dml, overflow_as_warning, truncate_as_warning),
) in cs
{
// check origin_flen and origin_decimal
let (f, d) = input.prec_and_frac();
let log = format!(
"input: {}, origin_flen: {}, origin_decimal: {}, actual flen: {}, actual decimal: {}",
input, origin_flen, origin_decimal, f, d
);
assert_eq!(f, origin_flen, "{}", log);
assert_eq!(d, origin_decimal, "{}", log);
// run test case
let ctx_in_dml_flag = vec![Flag::IN_INSERT_STMT, Flag::IN_UPDATE_OR_DELETE_STMT];
for in_dml_flag in ctx_in_dml_flag {
// make ctx
let mut flag: Flag = Flag::empty();
if overflow_as_warning {
flag |= Flag::OVERFLOW_AS_WARNING;
}
if truncate_as_warning {
flag |= Flag::TRUNCATE_AS_WARNING;
}
if in_dml {
flag |= in_dml_flag;
}
let cfg = Arc::new(EvalConfig::from_flag(flag));
let mut ctx = EvalContext::new(cfg);
// make field_type
let mut rft = FieldType::default();
let fta = rft.as_mut_accessor();
fta.set_flen(res_flen);
fta.set_decimal(res_decimal);
if is_unsigned {
fta.set_flag(FieldTypeFlag::UNSIGNED);
}
// call produce_dec_with_specified_tp
let r = produce_dec_with_specified_tp(&mut ctx, input, &rft);
// make log
let rs = r.as_ref().map(|x| x.to_string());
let expect_str = expect.as_ref().map(|x| x.to_string());
let log = format!(
"input: {}, origin_flen: {}, origin_decimal: {}, \
res_flen: {}, res_decimal: {}, is_unsigned: {}, \
in_dml: {}, in_dml_flag(if in_dml is false, it will take no effect): {:?}, \
expect: {:?}, expect: {:?}",
input,
origin_flen,
origin_decimal,
res_flen,
res_decimal,
is_unsigned,
in_dml,
in_dml_flag,
expect_str,
rs
);
// check result
match &expect {
Ok(d) => {
assert!(r.is_ok(), "{}", log);
assert_eq!(&r.unwrap(), d, "{}", log);
}
Err(Error::Eval(_, _)) => {
if let Error::Eval(_, d) = r.err().unwrap() {
assert_eq!(d, ERR_M_BIGGER_THAN_D, "{}", log);
} else {
unreachable!("{}", log);
}
}
_ => unreachable!("{}", log),
}
// check warning
match warning_err_code {
Some(code) => {
assert_eq!(ctx.warnings.warning_cnt, 1, "{}", log);
assert_eq!(ctx.warnings.warnings[0].get_code(), code, "{}", log);
}
None => assert_eq!(ctx.warnings.warning_cnt, 0, "{}", log),
}
}
}
}
}<|fim▁end|> | }; |
<|file_name|>run.tsx<|end_file_name|><|fim▁begin|>import 'chrome-extension-async'
import { getHosts, getHostKey, findMatchedHosts } from 'libs'
const baseURL = chrome.runtime.getURL('base.js')
const isValidURL = (url) => {
return (
url &&
(url.startsWith('//') ||
url.startsWith('https://') ||
url.startsWith('http://'))
)
}
const catchErr = (e) => {
console.error('Failed to inject scripts:', e)
}
const injectScriptPromise = (src, where = 'head') => {
return new Promise((resolve, reject) => {
const elm = document.createElement('script')
const targetElement = document[where] ? document[where] : document.head
targetElement.appendChild(elm)
elm.onload = () => {
resolve(`Inject ${src} complete!`)
}
elm.src = src
})
}
<|fim▁hole|>const extractScripts = (customjs, injections) => {
if (!customjs) {
return
}
const {
config: { enable, include, extra },
source
} = customjs
if (!enable) {
return
}
// base.js to provide useful functions
injections.add(baseURL)
// Predefined include
if (include) {
injections.add('https://ajax.googleapis.com/ajax/libs' + include)
}
// Extra include
;(extra || '')
.split(';')
.map((x) => x.trim())
.forEach((line) => {
if (isValidURL(line)) {
injections.add(line)
}
})
return source
}
const loadScripts = async (location) => {
const hosts = await getHosts()
const matchedHosts = findMatchedHosts(hosts, location)
const injections = new Set()
Promise.all(
matchedHosts.map(async (host) => {
const hostKey = getHostKey(host)
const obj = await chrome.storage.sync.get(hostKey)
return extractScripts(obj[hostKey], injections)
})
)
.then((values) => values.filter((x) => x))
.then((values) => {
if (values.length) {
console.info(
'Custom JavaScript for websites enabled.\nPlease visit https://xcv58.xyz/inject-js if you have any issue.'
)
}
return Promise.all(
[...injections].map((src) => src && injectScriptPromise(src))
)
.then(() => values)
.catch(catchErr)
})
.then((values) => values.map((src) => injectScriptPromise(src, 'body')))
.catch(catchErr)
}
loadScripts(window.location)<|fim▁end|> | |
<|file_name|>ifaceprom.go<|end_file_name|><|fim▁begin|>package main
// Test of promotion of methods of an interface embedded within a
// struct. In particular, this test exercises that the correct
// method is called.
type I interface {
one() int
two() string
}
type S struct {
I
}
type impl struct{}
func (impl) one() int {
return 1
}
func (impl) two() string {
return "two"
}
func main() {
var s S
s.I = impl{}
if one := s.I.one(); one != 1 {
panic(one)
}
if one := s.one(); one != 1 {
panic(one)
}
closOne := s.I.one
if one := closOne(); one != 1 {
panic(one)
}
closOne = s.one
if one := closOne(); one != 1 {
panic(one)
}
if two := s.I.two(); two != "two" {
panic(two)
}
if two := s.two(); two != "two" {
panic(two)
}
closTwo := s.I.two
if two := closTwo(); two != "two" {
panic(two)
}
closTwo = s.two
if two := closTwo(); two != "two" {
panic(two)
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>tst_qguivariant.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.<|fim▁hole|>** This file is part of the test suite of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** No Commercial Usage
** This file contains pre-release code and may not be distributed.
** You may use this file in accordance with the terms and conditions
** contained in the Technology Preview License Agreement accompanying
** this package.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
**
**
**
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QtTest/QtTest>
#include <qvariant.h>
class tst_QGuiVariant : public QObject
{
Q_OBJECT
public:
tst_QGuiVariant();
private slots:
void variantWithoutApplication();
};
tst_QGuiVariant::tst_QGuiVariant()
{}
void tst_QGuiVariant::variantWithoutApplication()
{
QVariant v = QString("red");
QVERIFY(qvariant_cast<QColor>(v) == QColor(Qt::red));
}
QTEST_APPLESS_MAIN(tst_QGuiVariant)
#include "tst_qguivariant.moc"<|fim▁end|> | ** Contact: Nokia Corporation ([email protected])
** |
<|file_name|>sensors.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# MODIFIED FROM ORIGINAL VERSION
#
# This file is not the same as in pypi. It includes a pull request to fix py3
# incompabilities that never ended up getting merged.
###############################################################################<|fim▁hole|>from ctypes.util import find_library
from psistats.libsensors.lib import stdc
version_info = (0, 0, 3)
__version__ = '.'.join(map(str, version_info))
__date__ = '2014-08-17'
__author__ = "Marc 'BlackJack' Rintsch"
__contact__ = '[email protected]'
__license__ = 'LGPL v2.1'
API_VERSION = 4
DEFAULT_CONFIG_FILENAME = '/etc/sensors3.conf'
LIB_FILENAME = os.environ.get('SENSORS_LIB') or find_library('sensors')
SENSORS_LIB = CDLL(LIB_FILENAME)
VERSION = c_char_p.in_dll(SENSORS_LIB, 'libsensors_version').value
MAJOR_VERSION = version_info[0]
class SensorsError(Exception):
def __init__(self, message, error_number=None):
Exception.__init__(self, message)
self.error_number = error_number
def _error_check(result, _func, _arguments):
if result < 0:
raise SensorsError(_strerror(result), result)
return result
_strerror = SENSORS_LIB.sensors_strerror
_strerror.argtypes = [c_int]
_strerror.restype = c_char_p
_init = SENSORS_LIB.sensors_init
_init.argtypes = [c_void_p]
_init.restype = c_int
_init.errcheck = _error_check
cleanup = SENSORS_LIB.sensors_cleanup
cleanup.argtypes = None
cleanup.restype = None
SENSORS_FEATURE_IN = 0x00
SENSORS_FEATURE_FAN = 0x01
SENSORS_FEATURE_TEMP = 0x02
SENSORS_FEATURE_POWER = 0x03
SENSORS_FEATURE_ENERGY = 0x04
SENSORS_FEATURE_CURR = 0x05
SENSORS_FEATURE_HUMIDITY = 0x06
# SENSORS_FEATURE_MAX_MAIN
SENSORS_FEATURE_VID = 0x10
SENSORS_FEATURE_INTRUSION = 0x11
#SENSORS_FEATURE_MAX_OTHER,
SENSORS_FEATURE_BEEP_ENABLE = 0x18
#SENSORS_FEATURE_MAX,
#SENSORS_FEATURE_UNKNOWN = INT_MAX
def init(config_filename=DEFAULT_CONFIG_FILENAME):
file_p = stdc.fopen(config_filename.encode('utf-8'), b'r')
if file_p is None:
error_number = get_errno()
raise OSError(error_number, os.strerror(error_number), config_filename)
try:
_init(file_p)
finally:
stdc.fclose(file_p)
class Subfeature(Structure):
_fields_ = [
('name', c_char_p),
('number', c_int),
('type', c_int),
('mapping', c_int),
('flags', c_uint),
]
def __repr__(self):
return '<%s name=%r number=%d type=%d mapping=%d flags=%08x>' % (
self.__class__.__name__,
self.name,
self.number,
self.type,
self.mapping,
self.flags
)
def get_value(self):
result = c_double()
_get_value(byref(self.parent.chip), self.number, byref(result))
return result.value
SUBFEATURE_P = POINTER(Subfeature)
class Feature(Structure):
_fields_ = [
('name', c_char_p),
('number', c_int),
('type', c_int),
('_first_subfeature', c_int),
('_padding1', c_int),
]
def __repr__(self):
return '<%s name=%r number=%r type=%r>' % (
self.__class__.__name__,
self.name,
self.number,
self.type
)
def __iter__(self):
number = c_int(0)
while True:
result_p = _get_all_subfeatures(
byref(self.chip),
byref(self),
byref(number)
)
if not result_p:
break
result = result_p.contents
result.chip = self.chip
result.parent = self
yield result
@property
def label(self):
#
# TODO Maybe this is a memory leak!
#
return _get_label(byref(self.chip), byref(self)).decode('utf-8')
def get_value(self):
#
# TODO Is the first always the correct one for all feature types?
#
return next(iter(self)).get_value()
FEATURE_P = POINTER(Feature)
class Bus(Structure):
TYPE_ANY = -1
NR_ANY = -1
_fields_ = [
('type', c_short),
('nr', c_short),
]
def __str__(self):
return (
'*' if self.type == self.TYPE_ANY
else _get_adapter_name(byref(self)).decode('utf-8')
)
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.type, self.nr)
@property
def has_wildcards(self):
return self.type == self.TYPE_ANY or self.nr == self.NR_ANY
BUS_P = POINTER(Bus)
class Chip(Structure):
#
# TODO Move common stuff into `AbstractChip` class.
#
_fields_ = [
('prefix', c_char_p),
('bus', Bus),
('addr', c_int),
('path', c_char_p),
]
PREFIX_ANY = None
ADDR_ANY = -1
def __new__(cls, *args):
result = super(Chip, cls).__new__(cls)
if args:
_parse_chip_name(args[0].encode('utf-8'), byref(result))
return result
def __init__(self, *_args):
Structure.__init__(self)
#
# Need to bind the following to the instance so it is available in
# `__del__()` when the interpreter shuts down.
#
self._free_chip_name = _free_chip_name
self.byref = byref
def __del__(self):
if self._b_needsfree_:
self._free_chip_name(self.byref(self))
def __repr__(self):
return '<%s prefix=%r bus=%r addr=%r path=%r>' % (
(
self.__class__.__name__,
self.prefix,
self.bus,
self.addr,
self.path
)
)
def __str__(self):
buffer_size = 200
result = create_string_buffer(buffer_size)
used = _snprintf_chip_name(result, len(result), byref(self))
assert used < buffer_size
return result.value.decode('utf-8')
def __iter__(self):
number = c_int(0)
while True:
result_p = _get_features(byref(self), byref(number))
if not result_p:
break
result = result_p.contents
result.chip = self
yield result
@property
def adapter_name(self):
return str(self.bus)
@property
def has_wildcards(self):
return (
self.prefix == self.PREFIX_ANY
or self.addr == self.ADDR_ANY
or self.bus.has_wildcards
)
CHIP_P = POINTER(Chip)
_parse_chip_name = SENSORS_LIB.sensors_parse_chip_name
_parse_chip_name.argtypes = [c_char_p, CHIP_P]
_parse_chip_name.restype = c_int
_parse_chip_name.errcheck = _error_check
_free_chip_name = SENSORS_LIB.sensors_free_chip_name
_free_chip_name.argtypes = [CHIP_P]
_free_chip_name.restype = None
_snprintf_chip_name = SENSORS_LIB.sensors_snprintf_chip_name
_snprintf_chip_name.argtypes = [c_char_p, c_size_t, CHIP_P]
_snprintf_chip_name.restype = c_int
_snprintf_chip_name.errcheck = _error_check
_get_adapter_name = SENSORS_LIB.sensors_get_adapter_name
_get_adapter_name.argtypes = [BUS_P]
_get_adapter_name.restype = c_char_p
_get_label = SENSORS_LIB.sensors_get_label
_get_label.argtypes = [CHIP_P, FEATURE_P]
_get_label.restype = c_char_p
_get_value = SENSORS_LIB.sensors_get_value
_get_value.argtypes = [CHIP_P, c_int, POINTER(c_double)]
_get_value.restype = c_int
_get_value.errcheck = _error_check
#
# TODO sensors_set_value()
# TODO sensors_do_chip_sets()
#
_get_detected_chips = SENSORS_LIB.sensors_get_detected_chips
_get_detected_chips.argtypes = [CHIP_P, POINTER(c_int)]
_get_detected_chips.restype = CHIP_P
_get_features = SENSORS_LIB.sensors_get_features
_get_features.argtypes = [CHIP_P, POINTER(c_int)]
_get_features.restype = FEATURE_P
_get_all_subfeatures = SENSORS_LIB.sensors_get_all_subfeatures
_get_all_subfeatures.argtypes = [CHIP_P, FEATURE_P, POINTER(c_int)]
_get_all_subfeatures.restype = SUBFEATURE_P
#
# TODO sensors_get_subfeature() ?
#
def iter_detected_chips(chip_name='*-*'):
chip = Chip(chip_name)
number = c_int(0)
while True:
result = _get_detected_chips(byref(chip), byref(number))
if not result:
break
yield result.contents<|fim▁end|> | import os
from ctypes import CDLL, c_char_p, c_int, c_void_p, c_uint, c_double, byref, Structure, get_errno,\
POINTER, c_short, c_size_t, create_string_buffer |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from flask.ext.wtf import Form
from wtforms import TextField, TextAreaField, SubmitField, PasswordField, validators, ValidationError, SelectField
class AWSIEFormFields(Form):
deviceId = TextField(u'Device ID', validators=[validators.required()])
notificationEmail = TextField("Notification Email", [validators.email("Please enter 7 characters to search")])
region = SelectField(u'Region', choices=[('us-east-1', 'us-east-1'), ('us-west-1', 'us-west-1'),
('us-west-2', 'us-west-2'), ('eu-west-1 ', 'eu-west-1'), ('ap-southeast-1', 'ap-southeast-1')])
acl = TextField("ACL", [validators.email("Please enter an ACL if you like")])
bucket = TextField("Import Bucket", [validators.required("Please enter the bucket name for importing files")])
logPrefix = TextField("Log Prefix", [validators.required("Please enter a log prefix")])
prefix = TextField("Prefix", [validators.required("Please enter a prefix")])
substitutions = TextField("Replace box1 with box2")
substitutionsb = TextField("Ignore directories")
substitutions2 = TextField("Replace box1 with box2")
substitutions2b = TextField("Ignore directories")
logBucket = TextField("Log Bucket", [validators.required("Please enter the bucket name for job logs")])
trueCryptPassword = TextField("TrueCrypt Password")
pinCode = TextField("Pin Code")
cacheControl = TextField("Cache Control")
contentDisposition = TextField("Content Disposition")
contentLanguage = TextField("Content Language")
contentTypes = SelectField(u'Map Content types', choices=[('', ''), ('yes', 'yes')])
diskTimestampMetadataKey = TextField("Disk Time Stamp Metadata Key")
expires = TextField("Expires")
ignore = SelectField(u'Exclude Lost+Found', choices=[('', ''), ('yes', 'yes')])
ignore2 = SelectField(u'Exclude Recycle Bin', choices=[('', ''), ('yes', 'yes')])
ignore3 = SelectField(u'Exclude ~ and .swp files', choices=[('', ''), ('yes', 'yes')])
ignore4 = TextField("Ignore directories")
ignore5 = TextField("Ignore directories")
ignore6 = TextField("Ignore files with specific extension")
ignore7 = TextField("Ignore files with specific extension")
setContentEncodingForGzFiles = SelectField(u'Set Encoding for .gz files', choices=[('', ''), ('yes', 'Yes')])
staticMetadata = TextField("Static Metadata")
storageClass = SelectField(u'Select Storage Class', choices=[('', ''), ('REDUCED_REDUNDANCY', 'REDUCED_REDUNDANCY')])
serviceLevel = SelectField(u'Expedite return shipping', choices=[('', ''), ('expeditedShipping', 'Expedited Shipping'), ('standard', 'standard Shipping')])
name = TextField("Name", [validators.required("Please enter your name, it's required")])
company = TextField("Company")
street1 = TextField("Street1", [validators.required("Please enter your street, it's required")])
street2 = TextField("Street2")
street3 = TextField("Street3")
city = TextField("City", [validators.required("Please enter your city, it's required")])
stateOrProvince = TextField("State or Province", [validators.required("Please enter your state or province, it's required")])
postalCode = TextField("Postal Code", [validators.required("Please enter your postal code, it's required")])
phoneNumber = TextField("Phone Number", [validators.required("Please enter your phone number, it's required")])
country = TextField("Country", [validators.required("Please enter your country, it's required")])
dataDescription = TextField("Description of The Data", [validators.required("Please enter a description, it's required")])
encryptedData = SelectField(u'Encrypted Data', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
exportCertifierName = TextField("Shipper Name", [validators.required("Please enter a name, it's required")])
requiresExportLicense = SelectField(u'Requires Export License', choices=[('', ''), ('Yes', 'Yes'), ('No', 'No')])
deviceValue = TextField("Device Value", [validators.required("Please enter a value, it's required")])
deviceCountryOfOrigin = TextField("Drive Manufacture Country", [validators.required("Please a country, it's required")])
deviceType = SelectField(u'Device Type', choices=[('', ''), ('externalStorageDevice', 'externalStorageDevice'), ('usbFlashDrive', 'usbFlashDrive'), ('sataDrive', 'sataDrive')])<|fim▁hole|> archivecomment = TextField("Archive Comment")
fileSystem = SelectField(u'File System', choices=[('', ''), ('NTFS', 'NTFS'), ('EXT4', 'EXT4')])
submit = SubmitField("Generate")<|fim▁end|> | typeOfExport = SelectField(u'Type of Export', choices=[('', ''), ('return', 'return'), ('permanent', 'permanent'), ('temporary', 'temporary')]) |
<|file_name|>SetRasterStyle.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
SetRasterStyle.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsProcessingAlgorithm,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterFile,
QgsProcessingOutputRasterLayer)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
class SetRasterStyle(QgisAlgorithm):
INPUT = 'INPUT'
STYLE = 'STYLE'
OUTPUT = 'OUTPUT'
def group(self):
return self.tr('Raster tools')
def groupId(self):
return 'rastertools'
def __init__(self):
super().__init__()
def flags(self):
return super().flags() | QgsProcessingAlgorithm.FlagNoThreading | QgsProcessingAlgorithm.FlagDeprecated<|fim▁hole|> self.tr('Raster layer')))
self.addParameter(QgsProcessingParameterFile(self.STYLE,
self.tr('Style file'), extension='qml'))
self.addOutput(QgsProcessingOutputRasterLayer(self.INPUT, self.tr('Styled')))
def name(self):
return 'setstyleforrasterlayer'
def displayName(self):
return self.tr('Set style for raster layer')
def processAlgorithm(self, parameters, context, feedback):
layer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
style = self.parameterAsFile(parameters, self.STYLE, context)
with open(style) as f:
xml = "".join(f.readlines())
d = QDomDocument()
d.setContent(xml)
layer.importNamedStyle(d)
layer.triggerRepaint()
return {self.INPUT: layer}<|fim▁end|> |
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, |
<|file_name|>clean.py<|end_file_name|><|fim▁begin|>"""
Functions performing URL trimming and cleaning
"""
## This file is available from https://github.com/adbar/courlan
## under GNU GPL v3 license
import logging
import re
from collections import OrderedDict
from urllib.parse import parse_qs, urlencode, urlparse, ParseResult
from .filters import validate_url
from .settings import ALLOWED_PARAMS, CONTROL_PARAMS,\
TARGET_LANG_DE, TARGET_LANG_EN
PROTOCOLS = re.compile(r'https?://')
SELECTION = re.compile(r'(https?://[^">&? ]+?)(?:https?://)|(?:https?://[^/]+?/[^/]+?[&?]u(rl)?=)(https?://[^"> ]+)')
MIDDLE_URL = re.compile(r'https?://.+?(https?://.+?)(?:https?://|$)')
NETLOC_RE = re.compile(r'(?<=\w):(?:80|443)')
PATH1 = re.compile(r'/+')
PATH2 = re.compile(r'^(?:/\.\.(?![^/]))+')
def clean_url(url, language=None):
'''Helper function: chained scrubbing and normalization'''
try:
return normalize_url(scrub_url(url), language)
except (AttributeError, ValueError):
return None
def scrub_url(url):
'''Strip unnecessary parts and make sure only one URL is considered'''
# trim
# https://github.com/cocrawler/cocrawler/blob/main/cocrawler/urls.py
# remove leading and trailing white space and unescaped control chars
url = url.strip('\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f \r\n')
# clean the input string
url = url.replace('[ \t]+', '')
# <![CDATA[http://www.urbanlife.de/item/260-bmw-i8-hybrid-revolution-unter-den-sportwagen.html]]>
if url.startswith('<![CDATA['): # re.match(r'<!\[CDATA\[', url):
url = url.replace('<![CDATA[', '') # url = re.sub(r'^<!\[CDATA\[', '', url)
url = url.replace(']]>', '') # url = re.sub(r'\]\]>$', '', url)
# markup rests
url = re.sub(r'</?a>', '', url)
# &
if '&' in url:
url = url.replace('&', '&')
#if '"' in link:
# link = link.split('"')[0]
# double/faulty URLs
protocols = PROTOCOLS.findall(url)
if len(protocols) > 1 and not 'web.archive.org' in url:
logging.debug('double url: %s %s', len(protocols), url)
match = SELECTION.match(url)
if match and validate_url(match.group(1))[0] is True:
url = match.group(1)
logging.debug('taking url: %s', url)<|fim▁hole|> match = MIDDLE_URL.match(url)
if match and validate_url(match.group(1))[0] is True:
url = match.group(1)
logging.debug('taking url: %s', url)
# too long and garbled URLs e.g. due to quotes URLs
# https://github.com/cocrawler/cocrawler/blob/main/cocrawler/urls.py
if len(url) > 500: # arbitrary choice
match = re.match(r'(.*?)[<>"\'\r\n ]', url)
if match:
url = match.group(1)
if len(url) > 500:
logging.debug('invalid-looking link %s of length %d',
url[:50] + '...', len(url))
# trailing ampersand
url = url.strip('&')
# trailing slashes in URLs without path or in embedded URLs
if url.count('/') == 3 or url.count('://') > 1:
url = url.rstrip('/')
# lower
# url = url.lower()
return url
def clean_query(parsed_url, strict=False, language=None):
'''Strip unwanted query elements'''
if len(parsed_url.query) > 0:
qdict = parse_qs(parsed_url.query)
newqdict = OrderedDict()
for qelem in sorted(qdict.keys()):
teststr = qelem.lower()
# control param
if strict is True and \
teststr not in ALLOWED_PARAMS and teststr not in CONTROL_PARAMS:
continue
# control language
if language is not None and teststr in CONTROL_PARAMS:
found_lang = str(qdict[qelem][0])
if (language == 'de' and found_lang not in TARGET_LANG_DE) or \
(language == 'en' and found_lang not in TARGET_LANG_EN) or \
found_lang != language:
logging.debug('bad lang: %s %s %s', language, qelem, found_lang)
raise ValueError
# insert
newqdict[qelem] = qdict[qelem]
newstring = urlencode(newqdict, doseq=True)
parsed_url = parsed_url._replace(query=newstring)
return parsed_url
def normalize_url(parsed_url, strict=False, language=None):
'''Takes a URL string or a parsed URL and returns a (basically) normalized URL string'''
if not isinstance(parsed_url, ParseResult):
parsed_url = urlparse(parsed_url)
# port
if parsed_url.port is not None and parsed_url.port in (80, 443):
parsed_url = parsed_url._replace(netloc=NETLOC_RE.sub('', parsed_url.netloc))
# path: https://github.com/saintamh/alcazar/blob/master/alcazar/utils/urls.py
newpath = PATH1.sub('/', parsed_url.path)
# Leading /../'s in the path are removed
newpath = PATH2.sub('', newpath)
# fragment
if strict is True:
newfragment = ''
else:
newfragment = parsed_url.fragment
# lowercase + remove fragments
parsed_url = parsed_url._replace(
scheme=parsed_url.scheme.lower(),
netloc=parsed_url.netloc.lower(),
path=newpath,
fragment=newfragment
)
# strip unwanted query elements
parsed_url = clean_query(parsed_url, strict, language)
# rebuild
return parsed_url.geturl()<|fim▁end|> | else: |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import HTMLParser
import json
from xml.etree import ElementTree
from django.conf import settings
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseBadRequest, Http404
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.template.loader import render_to_string
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _, ugettext_noop
from django.views.decorators.cache import cache_page
from django.views.generic import View
from couchdbkit import ResourceConflict
from casexml.apps.case.models import CASE_STATUS_OPEN
from casexml.apps.case.xml import V2
from casexml.apps.phone.fixtures import generator
from corehq.form_processor.utils import should_use_sql_backend
from corehq.form_processor.utils.general import use_sqlite_backend
from dimagi.utils.logging import notify_exception
from dimagi.utils.parsing import string_to_boolean
from dimagi.utils.web import json_response, get_url_base, json_handler
from touchforms.formplayer.api import DjangoAuth, get_raw_instance, sync_db
from touchforms.formplayer.models import EntrySession
from xml2json.lib import xml2json
from corehq import toggles, privileges
from corehq.apps.accounting.decorators import requires_privilege_for_commcare_user, requires_privilege_with_fallback
from corehq.apps.app_manager.dbaccessors import (
get_latest_build_doc,
get_brief_apps_in_domain,
get_latest_released_app_doc,
get_app_ids_in_domain,
get_current_app,
wrap_app,
)
from corehq.apps.app_manager.exceptions import FormNotFoundException, ModuleNotFoundException
from corehq.apps.app_manager.models import Application, ApplicationBase, RemoteApp
from corehq.apps.app_manager.suite_xml.sections.details import get_instances_for_module
from corehq.apps.app_manager.suite_xml.sections.entries import EntriesHelper
from corehq.apps.app_manager.util import get_cloudcare_session_data
from corehq.apps.cloudcare.api import (
api_closed_to_status,
CaseAPIResult,
get_app_json,
get_filtered_cases,
get_filters_from_request_params,
get_open_form_sessions,
look_up_app_json,
)
from corehq.apps.cloudcare.dbaccessors import get_cloudcare_apps
from corehq.apps.cloudcare.decorators import require_cloudcare_access
from corehq.apps.cloudcare.exceptions import RemoteAppError
from corehq.apps.cloudcare.models import ApplicationAccess
from corehq.apps.cloudcare.touchforms_api import BaseSessionDataHelper, CaseSessionDataHelper
from corehq.apps.domain.decorators import login_and_domain_required, login_or_digest_ex, domain_admin_required
from corehq.apps.groups.models import Group
from corehq.apps.reports.formdetails import readable
from corehq.apps.style.decorators import (
use_datatables,
use_jquery_ui,
)
from corehq.apps.users.models import CouchUser, CommCareUser
from corehq.apps.users.views import BaseUserSettingsView
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors, FormAccessors, LedgerAccessors
from corehq.form_processor.exceptions import XFormNotFound, CaseNotFound
from corehq.util.quickcache import skippable_quickcache
from corehq.util.xml_utils import indent_xml
from corehq.apps.analytics.tasks import track_clicked_preview_on_hubspot
from corehq.apps.analytics.utils import get_meta
@require_cloudcare_access
def default(request, domain):
    """Send the user to the CloudCare main page for ``domain``."""
    main_url = reverse('cloudcare_main', args=[domain, ''])
    return HttpResponseRedirect(main_url)
def insufficient_privilege(request, domain, *args, **kwargs):
    """Render the "insufficient privilege" page for ``domain``.

    Extra positional/keyword args are accepted (and ignored) so this view
    can stand in for any cloudcare URL pattern.
    """
    return render(
        request,
        "cloudcare/insufficient_privilege.html",
        {'domain': domain},
    )
class CloudcareMain(View):
    """Legacy CloudCare home page.

    Renders the app list for a domain, optionally pre-selecting an
    app/parent/case parsed out of ``urlPath`` (see ``_url_context`` below).
    Supports a "preview" mode that shows unreleased builds.
    """

    @use_datatables
    @use_jquery_ui
    @method_decorator(require_cloudcare_access)
    @method_decorator(requires_privilege_for_commcare_user(privileges.CLOUDCARE))
    def dispatch(self, request, *args, **kwargs):
        return super(CloudcareMain, self).dispatch(request, *args, **kwargs)

    def get(self, request, domain, urlPath):
        try:
            preview = string_to_boolean(request.GET.get("preview", "false"))
        except ValueError:
            # this is typically only set at all if it's intended to be true so this
            # is a reasonable default for "something went wrong"
            preview = True

        app_access = ApplicationAccess.get_by_domain(domain)
        accessor = CaseAccessors(domain)

        if not preview:
            apps = get_cloudcare_apps(domain)
            if request.project.use_cloudcare_releases:
                # Feature flag decides between latest build and latest
                # *released* (starred) build.
                if (toggles.CLOUDCARE_LATEST_BUILD.enabled(domain) or
                        toggles.CLOUDCARE_LATEST_BUILD.enabled(request.couch_user.username)):
                    get_cloudcare_app = get_latest_build_doc
                else:
                    get_cloudcare_app = get_latest_released_app_doc
                apps = map(
                    lambda app: get_cloudcare_app(domain, app['_id']),
                    apps,
                )
                apps = filter(None, apps)
                apps = map(wrap_app, apps)
                # convert to json
                apps = [get_app_json(app) for app in apps]
            else:
                # legacy functionality - use the latest build regardless of stars
                apps = [get_latest_build_doc(domain, app['_id']) for app in apps]
                apps = [get_app_json(ApplicationBase.wrap(app)) for app in apps if app]
        else:
            # big TODO: write a new apps view for Formplayer, can likely cut most out now
            if toggles.USE_FORMPLAYER_FRONTEND.enabled(domain):
                apps = get_cloudcare_apps(domain)
            else:
                apps = get_brief_apps_in_domain(domain)
            apps = [get_app_json(app) for app in apps if app and (
                isinstance(app, RemoteApp) or app.application_version == V2)]
            # Analytics: record that the preview mode was opened.
            meta = get_meta(request)
            track_clicked_preview_on_hubspot(request.couch_user, request.COOKIES, meta)

        # trim out empty apps
        apps = filter(lambda app: app, apps)
        # Drop apps this user is not allowed to see.
        apps = filter(lambda app: app_access.user_can_access_app(request.couch_user, app), apps)

        def _default_lang():
            if apps:
                # unfortunately we have to go back to the DB to find this
                return Application.get(apps[0]["_id"]).default_language
            else:
                return "en"

        # default language to user's preference, followed by
        # first app's default, followed by english
        language = request.couch_user.language or _default_lang()

        def _url_context():
            # given a url path, returns potentially the app, parent, and case, if
            # they're selected. the front end optimizes with these to avoid excess
            # server calls
            # there's an annoying dependency between this logic and backbone's
            # url routing that seems hard to solve well. this needs to be synced
            # with apps.js if anything changes
            # for apps anything with "view/app/" works
            # for cases it will be:
            # "view/:app/:module/:form/case/:case/"
            # if there are parent cases, it will be:
            # "view/:app/:module/:form/parent/:parent/case/:case/"
            # could use regex here but this is actually simpler with the potential
            # absence of a trailing slash
            split = urlPath.split('/')
            app_id = split[1] if len(split) >= 2 else None

            if len(split) >= 5 and split[4] == "parent":
                parent_id = split[5]
                case_id = split[7] if len(split) >= 7 else None
            else:
                parent_id = None
                case_id = split[5] if len(split) >= 6 else None

            app = None
            if app_id:
                if app_id in [a['_id'] for a in apps]:
                    app = look_up_app_json(domain, app_id)
                else:
                    messages.info(request, _("That app is no longer valid. Try using the "
                                             "navigation links to select an app."))
            if app is None and len(apps) == 1:
                # Only one app available: select it implicitly.
                app = look_up_app_json(domain, apps[0]['_id'])

            def _get_case(domain, case_id):
                # Fetch a case and sanity-check it belongs to this domain.
                case = accessor.get_case(case_id)
                assert case.domain == domain, "case %s not in %s" % (case_id, domain)
                return case.to_api_json()

            case = _get_case(domain, case_id) if case_id else None
            if parent_id is None and case is not None:
                # Derive the parent from the case's indices when not given in the URL.
                parent_id = case.get('indices', {}).get('parent', {}).get('case_id', None)
            parent = _get_case(domain, parent_id) if parent_id else None

            return {
                "app": app,
                "case": case,
                "parent": parent
            }

        context = {
            "domain": domain,
            "language": language,
            "apps": apps,
            "apps_raw": apps,
            "preview": preview,
            "maps_api_key": settings.GMAPS_API_KEY,
            "sessions_enabled": request.couch_user.is_commcare_user(),
            "use_cloudcare_releases": request.project.use_cloudcare_releases,
            "username": request.user.username,
            "formplayer_url": settings.FORMPLAYER_URL,
            'use_sqlite_backend': use_sqlite_backend(domain),
        }
        context.update(_url_context())
        if toggles.USE_FORMPLAYER_FRONTEND.enabled(domain):
            return render(request, "cloudcare/formplayer_home.html", context)
        else:
            return render(request, "cloudcare/cloudcare_home.html", context)
class FormplayerMain(View):
    """Formplayer (Web Apps) home page: lists accessible apps for a domain."""

    # Subclasses flip this to show non-released builds (see FormplayerMainPreview).
    preview = False
    urlname = 'formplayer_main'

    @use_datatables
    @use_jquery_ui
    @method_decorator(require_cloudcare_access)
    @method_decorator(requires_privilege_for_commcare_user(privileges.CLOUDCARE))
    def dispatch(self, request, *args, **kwargs):
        return super(FormplayerMain, self).dispatch(request, *args, **kwargs)

    def fetch_app(self, domain, app_id):
        # Feature flag decides between the latest build and the latest
        # *released* build; may return None if there is no such build.
        username = self.request.couch_user.username
        if (toggles.CLOUDCARE_LATEST_BUILD.enabled(domain) or
                toggles.CLOUDCARE_LATEST_BUILD.enabled(username)):
            return get_latest_build_doc(domain, app_id)
        else:
            return get_latest_released_app_doc(domain, app_id)

    def get(self, request, domain):
        app_access = ApplicationAccess.get_by_domain(domain)
        app_ids = get_app_ids_in_domain(domain)

        apps = map(
            lambda app_id: self.fetch_app(domain, app_id),
            app_ids,
        )
        apps = filter(None, apps)
        # In non-preview mode only cloudcare-enabled apps are shown.
        apps = filter(lambda app: app['cloudcare_enabled'] or self.preview, apps)
        apps = filter(lambda app: app_access.user_can_access_app(request.couch_user, app), apps)
        apps = sorted(apps, key=lambda app: app['name'])

        def _default_lang():
            try:
                return apps[0]['langs'][0]
            except Exception:
                return 'en'

        # default language to user's preference, followed by
        # first app's default, followed by english
        language = request.couch_user.language or _default_lang()

        context = {
            "domain": domain,
            "language": language,
            "apps": apps,
            "maps_api_key": settings.GMAPS_API_KEY,
            "username": request.user.username,
            "formplayer_url": settings.FORMPLAYER_URL,
            "single_app_mode": False,
            "home_url": reverse(self.urlname, args=[domain]),
        }
        return render(request, "cloudcare/formplayer_home.html", context)
class FormplayerMainPreview(FormplayerMain):
    """Preview variant of FormplayerMain: shows current (unreleased) apps."""

    preview = True
    urlname = 'formplayer_main_preview'

    def fetch_app(self, domain, app_id):
        # Use the current working copy instead of a released build.
        return get_current_app(domain, app_id)
class FormplayerPreviewSingleApp(View):
    """Formplayer preview page restricted to a single app (by ``app_id``)."""

    urlname = 'formplayer_single_app'

    @use_datatables
    @use_jquery_ui
    @method_decorator(require_cloudcare_access)
    @method_decorator(requires_privilege_for_commcare_user(privileges.CLOUDCARE))
    def dispatch(self, request, *args, **kwargs):
        return super(FormplayerPreviewSingleApp, self).dispatch(request, *args, **kwargs)

    def get(self, request, domain, app_id, **kwargs):
        app_access = ApplicationAccess.get_by_domain(domain)

        app = get_current_app(domain, app_id)

        if not app_access.user_can_access_app(request.couch_user, app):
            raise Http404()

        def _default_lang():
            try:
                return app['langs'][0]
            except Exception:
                return 'en'

        # default language to user's preference, followed by
        # first app's default, followed by english
        language = request.couch_user.language or _default_lang()

        context = {
            "domain": domain,
            "language": language,
            "apps": [app],
            "maps_api_key": settings.GMAPS_API_KEY,
            "username": request.user.username,
            "formplayer_url": settings.FORMPLAYER_URL,
            "single_app_mode": True,
            "home_url": reverse(self.urlname, args=[domain, app_id]),
        }
        return render(request, "cloudcare/formplayer_home.html", context)
@login_and_domain_required
@requires_privilege_for_commcare_user(privileges.CLOUDCARE)
def form_context(request, domain, app_id, module_id, form_id):
    """Return the touchforms session context for entering a form, as JSON.

    Optional GET parameters:
      * ``case_id`` -- pre-select a case (its name is appended to the session name)
      * ``instance_id`` -- load a previously submitted form instance for editing
      * ``task-list`` -- ``'true'`` enables delegation mode
    Raises Http404 for unknown module/form or missing form instance.
    """
    app = Application.get(app_id)
    form_url = '{}{}'.format(
        settings.CLOUDCARE_BASE_URL or get_url_base(),
        reverse('download_xform', args=[domain, app_id, module_id, form_id])
    )
    case_id = request.GET.get('case_id')
    instance_id = request.GET.get('instance_id')
    try:
        form = app.get_module(module_id).get_form(form_id)
    except (FormNotFoundException, ModuleNotFoundException):
        raise Http404()

    form_name = form.name.values()[0]

    # make the name for the session we will use with the case and form
    session_name = u'{app} > {form}'.format(
        app=app.name,
        form=form_name,
    )

    if case_id:
        case = CaseAccessors(domain).get_case(case_id)
        session_name = u'{0} - {1}'.format(session_name, case.name)

    root_context = {
        'form_url': form_url,
    }
    if instance_id:
        try:
            root_context['instance_xml'] = FormAccessors(domain).get_form(instance_id).get_xml()
        except XFormNotFound:
            raise Http404()

    session_extras = {'session_name': session_name, 'app_id': app._id}
    session_extras.update(get_cloudcare_session_data(domain, form, request.couch_user))

    delegation = request.GET.get('task-list') == 'true'
    session_helper = CaseSessionDataHelper(domain, request.couch_user, case_id, app, form, delegation=delegation)
    return json_response(session_helper.get_full_context(
        root_context,
        session_extras
    ))
# Auth decorator shared by the cloudcare JSON API endpoints below:
# session-or-digest login that also admits CommCare (mobile worker) users.
cloudcare_api = login_or_digest_ex(allow_cc_users=True)


def get_cases_vary_on(request, domain):
    """Cache-key components for ``get_cases`` (used by skippable_quickcache)."""
    request_params = request.GET

    return [
        request.couch_user.get_id
        if request.couch_user.is_commcare_user() else request_params.get('user_id', ''),
        request_params.get('ids_only', 'false'),
        request_params.get('case_id', ''),
        request_params.get('footprint', 'false'),
        request_params.get('closed', 'false'),
        json.dumps(get_filters_from_request_params(request_params)),
        domain,
    ]
def get_cases_skip_arg(request, domain):
    """
    When this function returns True, skippable_quickcache will not go to the cache for the result. By default,
    if neither of these params are passed into the function, nothing will be cached. Cache will always be
    skipped if ids_only is false.

    The caching is mainly a hack for touchforms to respond more quickly. Touchforms makes repeated requests to
    get the list of case_ids associated with a user.
    """
    # Caching is opt-in twice over: the domain needs the toggle, and the
    # request must pass both use_cache=true and ids_only=true.
    if not toggles.CLOUDCARE_CACHE.enabled(domain):
        return True
    request_params = request.GET
    return (not string_to_boolean(request_params.get('use_cache', 'false')) or
            not string_to_boolean(request_params.get('ids_only', 'false')))
@cloudcare_api
@skippable_quickcache(get_cases_vary_on, get_cases_skip_arg, timeout=240 * 60)
def get_cases(request, domain):
    """Return cases for a user (or one case by id) as JSON.

    GET parameters: ``user_id``, ``ids_only``, ``case_id``, ``footprint``,
    ``closed``, plus arbitrary property filters (see
    get_filters_from_request_params). Results are cached per
    ``get_cases_vary_on`` / ``get_cases_skip_arg`` above.
    """
    request_params = request.GET

    if request.couch_user.is_commcare_user():
        user_id = request.couch_user.get_id
    else:
        user_id = request_params.get("user_id", "")

    if not user_id and not request.couch_user.is_web_user():
        return HttpResponseBadRequest("Must specify user_id!")

    ids_only = string_to_boolean(request_params.get("ids_only", "false"))
    case_id = request_params.get("case_id", "")
    footprint = string_to_boolean(request_params.get("footprint", "false"))
    accessor = CaseAccessors(domain)

    if toggles.HSPH_HACK.enabled(domain):
        # Domain-specific workaround: return the given case, its parent and
        # the user's usercase directly.
        hsph_case_id = request_params.get('hsph_hack', None)
        if hsph_case_id != 'None' and hsph_case_id and user_id:
            case = accessor.get_case(hsph_case_id)
            usercase_id = CommCareUser.get_by_user_id(user_id).get_usercase_id()
            usercase = accessor.get_case(usercase_id) if usercase_id else None
            return json_response(map(
                lambda case: CaseAPIResult(domain=domain, id=case['_id'], couch_doc=case, id_only=ids_only),
                filter(None, [case, case.parent, usercase])
            ))

    if case_id and not footprint:
        # short circuit everything else and just return the case
        # NOTE: this allows any user in the domain to access any case given
        # they know its ID, which is slightly different from the previous
        # behavior (can only access things you own + footprint). If we want to
        # change this contract we would need to update this to check the
        # owned case list + footprint
        case = accessor.get_case(case_id)
        assert case.domain == domain
        cases = [CaseAPIResult(domain=domain, id=case_id, couch_doc=case, id_only=ids_only)]
    else:
        filters = get_filters_from_request_params(request_params)
        status = api_closed_to_status(request_params.get('closed', 'false'))
        case_type = filters.get('properties/case_type', None)
        cases = get_filtered_cases(domain, status=status, case_type=case_type,
                                   user_id=user_id, filters=filters,
                                   footprint=footprint, ids_only=ids_only,
                                   strip_history=True)
    return json_response(cases)
@cloudcare_api
def filter_cases(request, domain, app_id, module_id, parent_id=None):
    """Return the cases matching a module's case-list filter, as JSON.

    If the module defines a filter xpath (or the domain is on the SQL
    backend) the filtering is delegated to touchforms/formplayer;
    otherwise the built-in case API is used. Optionally restricts to
    children of ``parent_id`` and, with ``requires_parent_cases=true``,
    also returns the (not yet fetched) parent cases.
    """
    app = Application.get(app_id)
    module = app.get_module(module_id)
    auth_cookie = request.COOKIES.get('sessionid')
    requires_parent_cases = string_to_boolean(request.GET.get('requires_parent_cases', 'false'))

    xpath = EntriesHelper.get_filter_xpath(module)
    instances = get_instances_for_module(app, module, additional_xpaths=[xpath])
    extra_instances = [{'id': inst.id, 'src': inst.src} for inst in instances]
    use_formplayer = toggles.USE_FORMPLAYER.enabled(domain)
    accessor = CaseAccessors(domain)

    # touchforms doesn't like this to be escaped
    xpath = HTMLParser.HTMLParser().unescape(xpath)
    case_type = module.case_type

    if xpath or should_use_sql_backend(domain):
        # if we need to do a custom filter, send it to touchforms for processing
        additional_filters = {
            "properties/case_type": case_type,
            "footprint": True
        }

        helper = BaseSessionDataHelper(domain, request.couch_user)
        result = helper.filter_cases(xpath, additional_filters, DjangoAuth(auth_cookie),
                                     extra_instances=extra_instances, use_formplayer=use_formplayer)
        if result.get('status', None) == 'error':
            # Propagate the remote error; notify on server-side (500) failures.
            code = result.get('code', 500)
            message = result.get('message', _("Something went wrong filtering your cases."))
            if code == 500:
                notify_exception(None, message=message)
            return json_response(message, status_code=code)

        case_ids = result.get("cases", [])
    else:
        # otherwise just use our built in api with the defaults
        case_ids = [res.id for res in get_filtered_cases(
            domain,
            status=CASE_STATUS_OPEN,
            case_type=case_type,
            user_id=request.couch_user._id,
            footprint=True,
            ids_only=True,
        )]

    cases = accessor.get_cases(case_ids)

    if parent_id:
        cases = filter(lambda c: c.parent and c.parent.case_id == parent_id, cases)

    # refilter these because we might have accidentally included footprint cases
    # in the results from touchforms. this is a little hacky but the easiest
    # (quick) workaround. should be revisted when we optimize the case list.
    cases = filter(lambda c: c.type == case_type, cases)
    cases = [c.to_api_json(lite=True) for c in cases if c]

    response = {'cases': cases}
    if requires_parent_cases:
        # Subtract already fetched cases from parent list
        parent_ids = set(map(lambda c: c['indices']['parent']['case_id'], cases)) - \
            set(map(lambda c: c['case_id'], cases))
        parents = accessor.get_cases(list(parent_ids))
        parents = [c.to_api_json(lite=True) for c in parents]
        response.update({'parents': parents})

    return json_response(response)
@cloudcare_api
def get_apps_api(request, domain):
    """JSON list of the domain's cloudcare apps."""
    return json_response(get_cloudcare_apps(domain))


@cloudcare_api
def get_app_api(request, domain, app_id):
    """JSON for a single app; 404 if it is a remote app (RemoteAppError)."""
    try:
        return json_response(look_up_app_json(domain, app_id))
    except RemoteAppError:
        raise Http404()
@cloudcare_api
@cache_page(60 * 30)
def get_fixtures(request, domain, user_id, fixture_id=None):
    """Return restore fixtures for a mobile worker as XML.

    Without ``fixture_id`` returns all fixtures wrapped in a ``<fixtures>``
    element; with it, returns the single child of that fixture. Responds
    412 if ``user_id`` belongs to a web user, 404 if the user or fixture
    is missing.
    """
    try:
        user = CommCareUser.get_by_user_id(user_id)
    except CouchUser.AccountTypeError:
        # Fix: the original assignment ended with a trailing comma, which
        # made ``err`` a 1-tuple instead of a plain string.
        err = ("You can't use case sharing or fixtures as a %s. "
               "Login as a mobile worker and try again.") % settings.WEB_USER_TERM
        return HttpResponse(err, status=412, content_type="text/plain")

    if not user:
        raise Http404

    assert user.is_member_of(domain)
    restore_user = user.to_ota_restore_user()
    if not fixture_id:
        # Aggregate every fixture under a single root element.
        ret = ElementTree.Element("fixtures")
        for fixture in generator.get_fixtures(restore_user, version=V2):
            ret.append(fixture)
        return HttpResponse(ElementTree.tostring(ret), content_type="text/xml")
    else:
        fixture = generator.get_fixture_by_id(fixture_id, restore_user, version=V2)
        if not fixture:
            raise Http404
        assert len(fixture.getchildren()) == 1, 'fixture {} expected 1 child but found {}'.format(
            fixture_id, len(fixture.getchildren())
        )
        return HttpResponse(ElementTree.tostring(fixture.getchildren()[0]), content_type="text/xml")
@cloudcare_api
def get_sessions(request, domain):
    """List open form sessions for the requesting user as JSON.

    ``skip`` and ``limit`` arrive as query-string text (Django QueryDict
    values are strings); coerce them to int so pagination receives numbers
    instead of raw strings when the parameters are present.
    """
    # is it ok to pull user from the request? other api calls seem to have an explicit 'user' param
    skip = int(request.GET.get('skip') or 0)
    limit = int(request.GET.get('limit') or 10)
    return json_response(get_open_form_sessions(request.user, skip=skip, limit=limit))
@cloudcare_api
def get_session_context(request, domain, session_id):
    """GET the touchforms context for a session; DELETE removes the session."""
    # NOTE: although this view does not appeared to be called from anywhere it is, and cannot be deleted.
    # The javascript routing in cloudcare depends on it, though constructs it manually in a hardcoded way.
    # see getSessionContextUrl in cloudcare/util.js
    # Adding 'cloudcare_get_session_context' to this comment so that the url name passes a grep test
    try:
        session = EntrySession.objects.get(session_id=session_id)
    except EntrySession.DoesNotExist:
        session = None
    if request.method == 'DELETE':
        if session:
            session.delete()
        return json_response({'status': 'success'})
    else:
        helper = BaseSessionDataHelper(domain, request.couch_user)
        return json_response(helper.get_full_context({
            'session_id': session_id,
            'app_id': session.app_id if session else None
        }))
@cloudcare_api
def get_ledgers(request, domain):
    """
    Returns ledgers associated with a case in the format:
    {
        "section_id": {
            "product_id": amount,
            "product_id": amount,
            ...
        },
        ...
    }

    Note: this only works for the Couch backend
    """
    request_params = request.GET
    case_id = request_params.get('case_id')
    if not case_id:
        return json_response(
            {'message': 'You must specify a case id to make this query.'},
            status_code=400
        )
    try:
        case = CaseAccessors(domain).get_case(case_id)
    except CaseNotFound:
        raise Http404()
    ledger_map = LedgerAccessors(domain).get_case_ledger_state(case.case_id)

    def custom_json_handler(obj):
        # Serialize ledger values by their stock_on_hand; fall back to the
        # shared json_handler for everything else.
        if hasattr(obj, 'stock_on_hand'):
            return obj.stock_on_hand
        return json_handler(obj)

    return json_response(
        {
            'entity_id': case_id,
            'ledger': ledger_map,
        },
        default=custom_json_handler,
    )
@cloudcare_api
def sync_db_api(request, domain):
    """Trigger a touchforms sync for ``username`` (GET param); JSON result.

    Any failure is reported as a 500 JSON error rather than raising.
    """
    auth_cookie = request.COOKIES.get('sessionid')
    username = request.GET.get('username')
    try:
        response = sync_db(username, domain, DjangoAuth(auth_cookie))
    except Exception, e:
        return json_response(
            {'status': 'error', 'message': unicode(e)},
            status_code=500
        )
    else:
        return json_response(response)
class ReadableQuestions(View):
    """POST endpoint rendering a submitted form instance as readable HTML.

    Expects POST params ``instanceXml``, ``appId`` and ``xmlns``.
    """

    urlname = 'readable_questions'

    @csrf_exempt
    @method_decorator(cloudcare_api)
    def dispatch(self, request, *args, **kwargs):
        return super(ReadableQuestions, self).dispatch(request, *args, **kwargs)

    def post(self, request, domain):
        instance_xml = request.POST.get('instanceXml').encode('utf-8')
        app_id = request.POST.get('appId')
        xmlns = request.POST.get('xmlns')

        _, form_data_json = xml2json(instance_xml)
        pretty_questions = readable.get_questions(domain, app_id, xmlns)

        readable_form = readable.get_readable_form_data(form_data_json, pretty_questions)

        rendered_readable_form = render_to_string(
            'reports/form/partials/readable_form.html',
            {'questions': readable_form}
        )

        return json_response({
            'form_data': rendered_readable_form,
            'form_questions': pretty_questions
        })
@cloudcare_api
def render_form(request, domain):
    """Render the current state of a form session as readable HTML + raw XML.

    Requires GET param ``session_id``; 404 if the session does not exist,
    500 (plain text) if fetching the raw instance from touchforms fails.
    """
    # get session
    session_id = request.GET.get('session_id')

    session = get_object_or_404(EntrySession, session_id=session_id)

    try:
        raw_instance = get_raw_instance(session_id, domain)
    except Exception, e:
        return HttpResponse(e, status=500, content_type="text/plain")

    xmlns = raw_instance["xmlns"]
    form_data_xml = raw_instance["output"]

    _, form_data_json = xml2json(form_data_xml)
    pretty_questions = readable.get_questions(domain, session.app_id, xmlns)

    readable_form = readable.get_readable_form_data(form_data_json, pretty_questions)

    rendered_readable_form = render_to_string(
        'reports/form/partials/readable_form.html',
        {'questions': readable_form}
    )

    return json_response({
        'form_data': rendered_readable_form,
        'instance_xml': indent_xml(form_data_xml)
    })
class HttpResponseConflict(HttpResponse):
    # HTTP 409 Conflict -- used to signal a stale document revision on save.
    status_code = 409
class EditCloudcareUserPermissionsView(BaseUserSettingsView):
    """Admin page for configuring which groups may access which cloudcare apps."""

    template_name = 'cloudcare/config.html'
    urlname = 'cloudcare_app_settings'

    @property
    def page_title(self):
        # Branding differs depending on whether the new Formplayer frontend is on.
        if toggles.USE_FORMPLAYER_FRONTEND.enabled(self.domain):
            return _("Web Apps Permissions")
        else:
            return _("CloudCare Permissions")

    @method_decorator(domain_admin_required)
    @method_decorator(requires_privilege_with_fallback(privileges.CLOUDCARE))
    def dispatch(self, request, *args, **kwargs):
        return super(EditCloudcareUserPermissionsView, self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        apps = get_cloudcare_apps(self.domain)
        access = ApplicationAccess.get_template_json(self.domain, apps)
        groups = Group.by_domain(self.domain)
        return {
            'apps': apps,
            'groups': groups,
            'access': access,
        }

    def put(self, request, *args, **kwargs):
        # Save the access configuration; optimistic-concurrency check on
        # the couch _rev/_id guards against overwriting concurrent edits.
        j = json.loads(request.body)
        old = ApplicationAccess.get_by_domain(self.domain)
        new = ApplicationAccess.wrap(j)
        old.restrict = new.restrict
        old.app_groups = new.app_groups
        try:
            if old._rev != new._rev or old._id != new._id:
                raise ResourceConflict()
            old.save()
        except ResourceConflict:
            # Stale revision submitted: tell the client to reload (HTTP 409).
            return HttpResponseConflict()
        else:
            return json_response({'_rev': old._rev})
<|file_name|>unhygienic_example.rs<|end_file_name|><|fim▁begin|>#![crate_type = "lib"]
extern crate my_crate;<|fim▁hole|>
// Example macro demonstrating hygienic vs. unhygienic name resolution:
// points (1)-(3) depend on names at the invocation site, (4) does not.
#[macro_export]
macro_rules! unhygienic_macro {
    () => {
        // (1) unhygienic: depends on `my_crate` in the crate root at the invocation site.
        ::my_crate::f();

        // (2) unhygienic: defines `f` at the invocation site (in addition to the above point).
        use my_crate::f;
        f();

        g(); // (3) unhygienic: `g` needs to be in scope at use site.
        $crate::g(); // (4) hygienic: this always resolves to (a)
    }
}

// Exercises the macro where the required names happen to be in scope.
#[allow(unused)]
fn test_unhygienic() {
    unhygienic_macro!();
    f(); // `f` was defined at the use site
}

pub fn g() {} // (a)
<|file_name|>test_libwordlist.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# diceware_list -- generate wordlists for diceware
# Copyright (C) 2016-2019. Uli Fouquet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Tests for libwordlist module
from __future__ import unicode_literals
try:
from urllib.request import urlopen, URLError # python 3.x
except ImportError: # pragma: no cover
from urllib2 import urlopen, URLError # python 2.x
from io import StringIO
import codecs
import decimal
import gzip
import random
import pytest
import sys
from diceware_list import DEFAULT_CHARS
from diceware_list.libwordlist import (
alpha_dist, base10_to_n, filter_chars, base_terms_iterator,
idx_to_dicenums, min_width_iter, normalize, shuffle_max_width_items,
term_iterator, paths_iterator, is_prefix_code, get_matching_prefixes,
get_prefixes, strip_matching_prefixes, flatten_prefix_tree,
AndroidWordList, entropy_per_char_bruteforce, min_word_length,
min_length_iter
)
# Raw bytes of a gzip archive containing no data (the embedded filename
# field reads "sample_emtpy"); used as a minimal .gz fixture in tests.
EMPTY_GZ_FILE = (
    b'\x1f\x8b\x08\x08\xea\xc1\xecY\x02\xffsample_emtpy'
    b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def ggsource_unreachable():
    """Tell whether 'android.googlesource.com' cannot be reached.

    Network-dependent tests may be skipped when this returns True
    (i.e. when no network connection is available).
    """
    url = 'https://android.googlesource.com/'
    try:
        urlopen(url).read()
    except URLError:
        return True
    return False
def test_base10_to_n():
    """``base10_to_n`` converts ints to digit lists in an arbitrary base."""
    # we can turn integers into n-based numbers
    assert base10_to_n(0, 2) == [0]
    assert base10_to_n(1, 2) == [1]
    assert base10_to_n(2, 2) == [1, 0]
    assert base10_to_n(3, 2) == [1, 1]
    assert base10_to_n(7775, 6) == [5, 5, 5, 5, 5]
    assert base10_to_n(0, 6) == [0, ]
    assert base10_to_n(1, 6) == [1, ]
    assert base10_to_n(6, 6) == [1, 0]
    assert base10_to_n(34, 6) == [5, 4]
    assert base10_to_n(35, 6) == [5, 5]
    assert base10_to_n(37, 6) == [1, 0, 1]
    assert base10_to_n(38, 6) == [1, 0, 2]
    assert base10_to_n(255, 16) == [15, 15]
    assert base10_to_n(256, 16) == [1, 0, 0]
def test_filter_chars():
    """``filter_chars`` drops terms containing chars outside the allowed set."""
    # we can detect words with unwanted chars
    assert list(filter_chars([], DEFAULT_CHARS)) == []
    assert list(filter_chars(["a", "b"], DEFAULT_CHARS)) == ["a", "b"]
    assert list(filter_chars(["ä"], DEFAULT_CHARS)) == []
    assert list(filter_chars(["a", "ä"], DEFAULT_CHARS)) == ["a"]
    assert list(filter_chars(["ä", "a"], DEFAULT_CHARS)) == ["a"]
    assert list(filter_chars(["a", "ä", "b"], DEFAULT_CHARS)) == ["a", "b"]
    assert list(filter_chars(["a", "aä", "bö"], DEFAULT_CHARS)) == ["a"]
    assert list(filter_chars([u"a", u"ä"], DEFAULT_CHARS)) == [u"a"]


def test_filter_chars_all_allowed():
    """With ``allowed=None`` nothing is filtered out."""
    # if `allowed` is None, no filtering will be done
    assert list(filter_chars(['ä'], None)) == ['ä']
def test_idx_to_dicenums():
    """``idx_to_dicenums`` maps a list index to a dice-roll string."""
    # we can get dice numbers from list indexes
    assert idx_to_dicenums(0, 5) == "1-1-1-1-1"
    assert idx_to_dicenums(1, 5) == "1-1-1-1-2"
    assert idx_to_dicenums(7774, 5) == "6-6-6-6-5"
    assert idx_to_dicenums(7775, 5) == "6-6-6-6-6"
    # different dice sides, different results
    assert idx_to_dicenums(0, 4, 4) == "1-1-1-1"
    assert idx_to_dicenums(255, 4, 4) == "4-4-4-4"
    assert idx_to_dicenums(255, 4) == "2-2-1-4"
    # we can change the separator string (or leave it out)
    assert idx_to_dicenums(0, 3) == "1-1-1"  # default
    assert idx_to_dicenums(0, 3, separator="sep") == "1sep1sep1"
    assert idx_to_dicenums(0, 3, separator="") == "111"


def test_idx_to_dicenums_gives_text():
    """The result is text (unicode under py2), not bytes."""
    # we get text from this function, i.e. unicode under py2.
    result = idx_to_dicenums(0, 5)
    assert isinstance(result, type('text'))
def test_min_width_iter(monkeypatch):
    """``min_width_iter`` yields the N shortest terms of a list."""
    # we can get iterators with minimal list width.
    # Pin random.shuffle to a no-op so the output order is deterministic.
    monkeypatch.setattr(random, "shuffle", lambda x: x)
    assert list(min_width_iter(["bb", "a", "ccc", "dd"], 3)) == [
        "a", "bb", "dd"]
    assert list(min_width_iter(["c", "a", "b"], 2)) == ["a", "b"]
    assert list(min_width_iter(["c", "a", "b"], 3)) == ["a", "b", "c"]
    assert list(min_width_iter(["a", "c", "bb"], 2)) == ["a", "c"]
    assert list(min_width_iter(["a", "cc", "b"], 2)) == ["a", "b"]
    assert list(min_width_iter(["aa", "c", "bb"], 2)) == ["c", "aa"]


def test_min_length_iter():
    """``min_length_iter`` drops terms shorter than the given minimum."""
    assert list(min_length_iter(iter([]))) == []
    assert list(min_length_iter(iter([]), 1)) == []
    assert list(
        min_length_iter(iter(["a", "bb", "ccc"]), 2)) == ["bb", "ccc"]


def test_min_width_iter_shuffle_max_widths_values(monkeypatch):
    """Terms of maximum width are shuffled before selection."""
    # words with maximum width are shuffled
    # Pin shuffle to list.reverse() for a predictable "shuffled" order.
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    assert list(min_width_iter(
        ["a", "aa", "bb"], 2, shuffle_max_width=True)) == ["a", "bb"]
    assert list(min_width_iter(
        ["bbb", "aa", "a"], 2, shuffle_max_width=True)) == ["a", "aa"]
    assert list(min_width_iter(
        ["aa", "a"], 2, shuffle_max_width=True)) == ["a", "aa"]


def test_min_width_iter_discards_min_len_values(monkeypatch):
    """Terms shorter than ``min_len`` are discarded before selection."""
    # too short terms are discarded
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    assert sorted(list(min_width_iter(
        ['a', 'aa', 'b', 'ddd', 'ccc'], 2,
        shuffle_max_width=False, min_len=1))) == ['a', 'b']
    assert sorted(list(min_width_iter(
        ['a', 'aa', 'b', 'ddd', 'ccc'], 2,
        shuffle_max_width=False, min_len=2))) == ['aa', 'ccc']
    assert sorted(list(min_width_iter(
        ['a', 'aa', 'b', 'ddd', 'ccc'], 2,
        shuffle_max_width=True, min_len=1))) == ['a', 'b']
    assert sorted(list(min_width_iter(
        ['a', 'aa', 'b', 'ddd', 'ccc'], 2,
        shuffle_max_width=True, min_len=2))) in (['aa', 'ccc'], ['aa', 'ddd'])
def test_normalize():
    """``normalize`` transliterates accented/special chars to plain ASCII."""
    # we can normalize texts.
    assert normalize("ªºÀÁÂÃÄÅÆ") == "aoAAAAAEAAE"
    assert normalize("ÇÈÉÊËÌÍÎÏ") == "CEEEEIIII"
    assert normalize("ÒÓÔÕÖØÙÚÛÜ") == "OOOOOEOEUUUUE"
    assert normalize("ĐđÐÑÝßàáâãäåæçèéêë") == "DdDNYssaaaaaeaaeceeee"
    assert normalize("ìíîïñòóôõöøùúûüý") == "iiiinoooooeoeuuuuey"
    assert normalize("ÿĀāĂ㥹ĆćĈĉĊċČčĎď") == "yAaAaAaCcCcCcCcDd"
    assert normalize("ĒēĔĕĖėĘęĚěĜĝĞğĠġĢģ") == "EeEeEeEeEeGgGgGgGg"
    assert normalize("ĤĥĨĩĪīĬĭĮįİĒēĔĕĖė") == "HhIiIiIiIiIEeEeEe"
    assert normalize("ĘęĚěĜĝĞğĠġĢģĤĥ") == "EeEeGgGgGgGgHh"
    assert normalize("ĨĩĪīĬĭĮįİIJijĴĵĶķ") == "IiIiIiIiIIJijJjKk"
    assert normalize("ĹĺĻļĽľĿŀŃńŅņŇňŌō") == "LlLlLlL·l·NnNnNnOo"
    assert normalize("ŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠš") == "OoOoRrRrRrSsSsSsSs"
    assert normalize("ŢţŤťŨũŪūŬŭŮůŰűŲų") == "TtTtUuUuUuUuUuUu"
    assert normalize("ŴŵŶŷŸŹźŻżŽžſ") == "WwYyYZzZzZzs"
    # "þĦħĦħıĸŁłŊŋʼnŒœŦŧƀƁƂƃƄƅƆƇƈƉƊƋƌƍ""
    assert normalize("mäßig") == "maessig"


def test_normalize_gives_text():
    """The normalized value is always text, regardless of input type."""
    # we get unicode/text strings back
    assert isinstance(normalize("far"), type("text"))
    assert isinstance(normalize("fär"), type("text"))
    assert isinstance(normalize(str("far")), type("text"))
def test_shuffle_max_width_items(monkeypatch):
    """``shuffle_max_width_items`` shuffles only the widest terms."""
    # we can shuffle the max width items of a list
    # install a pseudo-shuffler that generates predictable orders
    # so that last elements are returned in reverse order.
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    # an ordered list
    result = list(shuffle_max_width_items(["a", "aa", "bb", "cc"]))
    assert result == ["a", "cc", "bb", "aa"]
    # an unordered list
    result = list(shuffle_max_width_items(["aa", "d", "bb", "a", "cc"]))
    assert result == ["d", "a", "cc", "bb", "aa"]
    # a list of which the longes item should not be part of
    result = list(shuffle_max_width_items(
        ["eeee", "bb", "ccc", "aa", "ddd"], max_width=3))
    assert "eeee" not in result
    # a list with one length only
    result = list(shuffle_max_width_items(["aa", "bb", "cc"]))
    assert result == ["cc", "bb", "aa"]


def test_shuffle_max_width_items_copes_with_files(monkeypatch, tmpdir):
    """File objects are accepted as input (bytes lines)."""
    # when shuffling max width entries we accept file input
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    wlist = tmpdir.join("wlist.txt")
    wlist.write(b"\n".join([b"a", b"bb", b"cc"]))
    with open(str(wlist), "rb") as fd:
        result = list(shuffle_max_width_items(fd))
    assert result == [b"a", b"cc", b"bb"]
def test_base_terms_iterator():
    """``base_terms_iterator`` yields the bundled base word lists."""
    # we can get an iterator over base terms
    base_iter = base_terms_iterator()
    base_list = list(base_iter)
    assert "a2" in base_list
    assert "9z" in base_list
    assert "0" in base_list
    assert "zzzz" in base_list


def test_base_terms_iterator_option_use_kit():
    """``use_kit`` toggles inclusion of the dicewarekit list."""
    # we can tell whether to use dicewarekit, diceware416 lists.
    assert "yyyy" not in list(base_terms_iterator(use_kit=False))
    assert "a2" in list(base_terms_iterator(use_kit=False))
    assert "yyyy" in list(base_terms_iterator(use_kit=True))
    assert "a2" in list(base_terms_iterator(use_kit=True))
class TestTermIterator(object):
    """Tests for ``term_iterator`` (terms read from open file objects)."""

    def test_term_iterator(self, tmpdir):
        # the term_iterator really returns iterators
        wlist = tmpdir.join("wlist.txt")
        wlist.write(b"\n".join([b"a", b"b", b"c"]))
        with open(str(wlist), "rb") as fd:
            result = list(term_iterator([fd, ]))
        assert result == [b"a", b"b", b"c"]

    def test_term_iterator_multiple_files(self, tmpdir):
        # we can feed multiple input files to term_iterator
        wlist1 = tmpdir.join("wlist1.txt")
        wlist2 = tmpdir.join("wlist2.txt")
        wlist1.write(b"\n".join([b"a1", b"b1", b"c1"]))
        wlist2.write(b"\n".join([b"a2", b"b2", b"c2"]))
        with open(str(wlist1), "rb") as fd1:
            with open(str(wlist2), "rb") as fd2:
                result = list(term_iterator([fd1, fd2]))
        assert result == [b"a1", b"b1", b"c1", b"a2", b"b2", b"c2"]

    def test_term_iterator_handles_umlauts(self, tmpdir):
        # we can feed term iterators with umlauts
        wlist = tmpdir.join("wlist.txt")
        wlist.write_text(u"ä\nö\n", "utf-8")
        with codecs.open(str(wlist), "r", "utf-8") as fd:
            result = list(term_iterator([fd, ]))
        assert result == ["ä", "ö"]

    def test_term_iterator_ignores_empty_lines(self, tmpdir):
        # empty lines will be ignored
        wlist = tmpdir.join("wlist.txt")
        wlist.write("foo\n\nbar\n\n")
        with open(str(wlist), "r") as fd:
            result = list(term_iterator([fd, ]))
        assert result == ["foo", "bar"]
class TestPathsIterator(object):
    """Tests for ``paths_iterator``, which yields decoded terms from file
    paths (or from stdin when a dash is given)."""
    def test_paths_iterator(self, tmpdir):
        # the paths iterator provides terms from paths
        wlist = tmpdir.join("wlist.txt")
        wlist.write(b"\n".join([b"a", b"b", b"c"]))
        result = list(paths_iterator([str(wlist), ]))
        assert result == ["a", "b", "c"]
    def test_multiple_paths(self, tmpdir):
        # the paths iterator can cope with several files
        wlist1 = tmpdir.join("wlist1.txt")
        wlist2 = tmpdir.join("wlits2.txt")
        wlist1.write(b"a\nb")
        wlist2.write(b"c\nd")
        result = list(paths_iterator([str(wlist1), str(wlist2)]))
        assert result == ["a", "b", "c", "d"]
    def test_read_stdin(self, tmpdir, argv_handler):
        # we can tell to read from stdin (dash as filename)
        sys.stdin = StringIO('term1\nterm2\näöü\n')
        result = list(paths_iterator('-'))
        assert result == ['term1', 'term2', 'äöü']
class TestIsPrefixCode(object):
    """Tests for ``is_prefix_code``: a wordlist is a prefix code when no
    word is a prefix of another word in the same list."""
    def test_is_prefix_code(self):
        # we can really tell whether some list is a prefix code.
        assert is_prefix_code(["aa", "ab", "ac"]) is True
        assert is_prefix_code([]) is True
        assert is_prefix_code(["a", "ab", "c"]) is False
        assert is_prefix_code(["a", "c", "ab"]) is False
        assert is_prefix_code(["aa", "b", "a"]) is False  # order
        assert is_prefix_code(["a", "a"]) is False  # identity
    def test_is_prefix_code_sorted_input(self):
        # we do not sort already sorted input
        assert is_prefix_code(["a", "aa", "b"], is_sorted=True) is False
        assert is_prefix_code(["b", "c", "d"], is_sorted=True) is True
        assert is_prefix_code(["b", "a"], is_sorted=False) is True
        # we do not define behavior for unsorted lists, if `is_sorted` is True
    def test_is_prefix_code_accepts_iter(self):
        # is_prefix_code really copes with iterators (not only iterables)
        assert is_prefix_code(iter(["a", "b", "c"])) is True
        assert is_prefix_code(iter(["aa", "a"])) is False
    def test_is_prefix_code_non_destructive(self):
        # is_prefix_code is a non-destructive function.
        iterable = ["d", "b", "c"]
        is_prefix_code(iterable, is_sorted=False)
        assert iterable == ["d", "b", "c"]
        iterable = ["a", "b", "c"]
        is_prefix_code(iterable, is_sorted=True)
        assert iterable == ["a", "b", "c"]
    def test_is_prefix_code_non_ascii(self):
        # is_prefix_code copes with umlauts etc.
        assert is_prefix_code(["z", "ä", "y", "äh"]) is False
        assert is_prefix_code(["a", "äh"]) is True
class TestGetMatchingPrefixes(object):
    """Tests for ``get_matching_prefixes``, which yields all pairs
    ``(prefix, word)`` of a wordlist where ``prefix`` is a proper prefix
    of ``word``."""
    def test_get_matching_prefixes(self):
        assert list(get_matching_prefixes([])) == []
        assert list(get_matching_prefixes(["a", "aa", "ab", "b", "x"])) == [
            ("a", "aa"), ("a", "ab")]
        assert list(get_matching_prefixes(["a", "aa"])) == [("a", "aa")]
        assert list(get_matching_prefixes(["b", "aa", "a"])) == [("a", "aa")]
    def test_get_matching_prefixes_sorted_input(self):
        # we can presort input lists
        assert list(
            get_matching_prefixes(["a", "aa", "ab"], is_sorted=True)) == [
                ("a", "aa"), ("a", "ab")]
        assert list(get_matching_prefixes(["aa", "a"], is_sorted=False)) == [
            ("a", "aa")]
        assert list(
            get_matching_prefixes(["a", "aa", "aaa"], is_sorted=True)) == [
                ("a", "aa"), ("a", "aaa"), ("aa", "aaa")]
        assert list(
            get_matching_prefixes(["a", "aa", "aaa", "aaaa"], is_sorted=True)
            ) == [
            ("a", "aa"), ("a", "aaa"), ("a", "aaaa"), ("aa", "aaa"),
            ("aa", "aaaa"), ("aaa", "aaaa")]
    def test_get_matching_prefixes_non_destructive(self):
        # the given input will not be changed.
        iterable = ["a", "aa", "c"]
        list(get_matching_prefixes(iterable, is_sorted=False))
        assert iterable == ["a", "aa", "c"]
        list(get_matching_prefixes(iterable, is_sorted=True))
        assert iterable == ["a", "aa", "c"]
    def test_get_matching_prefixes_non_ascii(self):
        # get_matching_prefixes copes with umlauts etc.
        # BUG FIX: these checks previously lacked ``assert`` (and the
        # ``list()`` call), so each line evaluated a comparison and threw
        # the result away -- the test could never fail.
        assert list(
            get_matching_prefixes(["a", "ä", "ö"], is_sorted=False)) == []
        assert list(
            get_matching_prefixes(["a", "ä", "äh"], is_sorted=False)) == [
                ("ä", "äh")]
class TestStrinMatchingPrefixes(object):
def test_strip_matching_prefixes(self):
# we can get prefix code from any input
assert list(strip_matching_prefixes(
["a", "aa", "b"], is_sorted=False, prefer_short=True)
) == ["a", "b"]<|fim▁hole|> assert list(strip_matching_prefixes(
["aa", "a", "b"], is_sorted=False, prefer_short=True)
) == ["a", "b"]
assert list(strip_matching_prefixes(
["a", "aa"], is_sorted=False, prefer_short=True)) == ["a"]
assert list(strip_matching_prefixes(
["aa", "a"], is_sorted=False, prefer_short=True)) == ["a"]
def test_strip_matching_prefixes_empty(self):
# we cope with empty iterables
assert list(strip_matching_prefixes([], is_sorted=True)) == []
def test_strip_matching_prefixes_non_destructive(self):
# given input will not be modified
in_list = ["b", "a", "aa"]
result = list(strip_matching_prefixes(in_list, is_sorted=False))
assert in_list == ["b", "a", "aa"] # unchanged
assert result == ["a", "b"]
def test_strip_matching_prefixes_prefer_short(self):
# we can tell to prefer shorter prefixes
in_list = ["a", "aa", "b"]
result1 = list(strip_matching_prefixes(
in_list, is_sorted=False, prefer_short=True))
assert result1 == ["a", "b"]
result2 = list(strip_matching_prefixes(
in_list, is_sorted=False, prefer_short=False))
assert result2 == ["aa", "b"]
result3 = list(strip_matching_prefixes(
["a", "aa", "ab", "c"], is_sorted=True, prefer_short=True))
assert result3 == ["a", "c"]
def test_strip_matching_prefixes_third_nesting_level(self):
# we cope with highly nested prefixes
result = list(strip_matching_prefixes(
["a", "aa", "aaa"], prefer_short=False))
assert result == ["aaa"]
result = list(strip_matching_prefixes(
["a", "aa", "aaa"], prefer_short=True))
assert result == ["a"]
def test_get_prefixes():
    """get_prefixes builds a tree of nested lists: each sublist starts
    with a word, followed by sublists for the words it prefixes."""
    # we can create tree-like nested lists of prefixed lists of strings
    assert get_prefixes([]) == []
    assert get_prefixes(["a"]) == [["a"]]
    assert get_prefixes(["a", "b"]) == [["a"], ["b"]]
    assert get_prefixes(["a", "ab"]) == [["a", ["ab"]]]
    assert get_prefixes(["a", "aa", "b"]) == [["a", ["aa"]], ["b"]]
    assert get_prefixes(["a", "b", "ba"]) == [["a"], ["b", ["ba"]]]
    assert get_prefixes(["a", "aa", "aaa", "ab"]) == [
        ['a', ['aa', ['aaa']], ['ab']]]
    assert get_prefixes(["a", "aa", "aaa", "ab", "ac"]) == [
        ['a', ['aa', ['aaa']], ['ab'], ['ac']]]
def test_flatten_prefix_tree():
    """flatten_prefix_tree keeps one word per branch; ``prefer_short``
    decides whether the shortest or the longest candidates survive."""
    # we can flatten prefix trees
    assert flatten_prefix_tree([["a"], ["b"]]) == ["a", "b"]
    assert flatten_prefix_tree([["a", ["ab"]]]) == ["a"]
    assert flatten_prefix_tree(
        [["a", ["ab"]]], prefer_short=False) == ["ab"]
    assert flatten_prefix_tree(
        [['a', ['aa', ['aaa']], ['ab'], ['ac']]], prefer_short=False) == [
            'aaa', 'ab', 'ac']
def test_alpha_dist():
    """alpha_dist counts character occurrences over a whole wordlist."""
    # we get proper distributions of alphabets
    assert alpha_dist([]) == dict()
    assert alpha_dist(['a', 'b']) == dict(a=1, b=1)
    assert alpha_dist(['ab', 'b']) == dict(a=1, b=2)
def test_entropy_per_char_bruteforce():
    """Shannon entropy per character when brute-forcing a wordlist.

    Decimal precision is pinned to 3 significant digits so the expected
    values below compare exactly.
    """
    # we can get the entropy per char for plain bruteforce
    decimal.getcontext().prec = 3
    assert entropy_per_char_bruteforce(['ab', ]) == decimal.Decimal(1.0)
    assert entropy_per_char_bruteforce(['a', 'b']) == decimal.Decimal(1.0)
    assert entropy_per_char_bruteforce(
        ['aaa', 'b']) == decimal.Decimal('0.811')
    assert entropy_per_char_bruteforce(
        ['ab', 'bc', 'cd', 'da']) == decimal.Decimal('2.0')
    assert entropy_per_char_bruteforce(
        ['art', 'air']) == decimal.Decimal('1.92')
def test_min_word_length():
    """Minimum word length required so the list keeps its full entropy."""
    # we can compute the minimum length of a word required for a wordlist
    assert min_word_length([]) == 1
    assert min_word_length(['a', 'aa', 'aaa']) == 1
    assert min_word_length(['a', 'b']) == 1
    assert min_word_length(['abcd'] * 8192) == 7
    assert min_word_length(['abab'] * 16) == 4
    # we also accept iterators as input
    assert min_word_length(iter(['a', 'b'])) == 1
def test_min_word_length_desired_len():
    """A target list length different from the input length is honoured."""
    # the desired list length can differ from the current list length
    # char entropy = 2.0, 16 = 2^4
    assert min_word_length(['abcd'] * 1024, 16) == 2
    # char entropy = 2.0, 32 = 2^5
    assert min_word_length(['abcd'] * 8192, 32) == 3
class TestAndroidWordlist(object):
    """Tests for ``AndroidWordList``, a helper that downloads, decompresses
    and parses the gzip'ed wordlists shipped with the Android keyboard.

    The ``local_android_*`` fixtures redirect downloads to local sample
    files so no network access happens during the tests.
    """
    def test_attributes(self):
        # android wordlists objects provide some attributes we expect
        wl = AndroidWordList()
        assert hasattr(wl, "base_url")
        assert hasattr(wl, "path")
        assert hasattr(wl, "gz_data")
        assert hasattr(wl, "lang")
    def test_init_path(self, local_android_dir):
        # we can pass in a path to an unencoded file (no base64).
        path = local_android_dir / "de_wordlist.combined.gz"
        wl = AndroidWordList('file:////%s' % path)
        assert wl.path == 'file:////%s' % path
    def test_download(self, local_android_download_b64):
        # we can download wordfiles that are base64 encoded.
        wl = AndroidWordList(lang="de")
        dl_data = wl.download()
        assert wl.decompress(dl_data) == (
            b'dictionary=main:de,locale=de,description=Deutsch,'
            b'date=1414726263,version=54,REQUIRES_GERMAN_UMLAUT_PROCESSING=1'
            b'\n word=der,f=216,flags=,originalFreq=216\n word=und,f=213,'
            b'flags=,originalFreq=213\n')
    def test_download_de(self, local_android_download_b64):
        # we can download a german wordlist.
        wl = AndroidWordList(lang="de")
        wl.download()
        assert list(wl.get_words()) == ['der', 'und']
    def test_download_en(self, local_android_download_b64):
        # we can download an english wordlist.
        wl = AndroidWordList(lang="en")
        wl.download()
        assert list(wl.get_words()) == [
            'the', 'to', 'of', 'and', 'hardcore', 'import']
    def test_decompress(self, local_android_dir):
        # we can decompress downloaded stuff.
        wl = AndroidWordList()
        path = local_android_dir / "de_wordlist.combined.gz"
        data = path.read_binary()
        assert wl.decompress(data).startswith(b"dictionary=main:de,locale=de")
    def test_save(self, local_android_download_b64, tmpdir):
        # we can save downloaded wordlists.
        wl = AndroidWordList(lang="en")
        wl.download()
        path = tmpdir / 'mywordlist.gz'
        wl.save(str(path))
        assert path.isfile()
        assert path.size() == 235
    def test_save_no_data(self, local_android_download_b64, tmpdir):
        # we do not complain when no data was downloaded already
        wl = AndroidWordList()
        path = tmpdir / 'mywordlist.gz'
        wl.save(str(path))
        assert not path.isfile()
    def test_get_basename(self):
        # we can get the basename of the file to download
        wl = AndroidWordList()
        assert wl.get_basename() == "en_wordlist.combined.gz"
    def test_get_basename_lang(self, local_android_download_b64):
        # when getting basename, we can select the language
        wl = AndroidWordList()
        assert wl.get_basename(lang="de") == "de_wordlist.combined.gz"
    def test_get_basename_path(self, local_android_dir):
        # we get a correct basename also if path is set manually
        wl = AndroidWordList()
        path1 = local_android_dir / "de_wordlist.combined.gz"
        path2 = local_android_dir / "my_wordlist.gzip"
        path1.copy(path2)
        wl = AndroidWordList('file:////%s' % path2)
        assert wl.get_basename(lang="foo") == "my_wordlist.gzip"
    def test_metadata(self, local_android_dir):
        # we can extract metadata from android wordfiles
        path = local_android_dir / "de_wordlist.combined.gz"
        wl = AndroidWordList()
        wl.gz_data = path.read_binary()
        meta = wl.get_meta_data()
        assert meta == {
            'dictionary': 'main:de',
            'locale': 'de',
            'description': 'Deutsch',
            'date': '1414726263',
            'version': '54',
            'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'
        }
    def test_metadata_none(self):
        # we cope with situation, when no wordfile was set before.
        wl = AndroidWordList()
        assert wl.get_meta_data() == {}
    def test_metadata_empty(self):
        # we cope with situation, where the wordfile is empty
        wl = AndroidWordList()
        wl.gz_data = EMPTY_GZ_FILE
        assert wl.get_meta_data() == {}
    def test_parse_lines(self, local_android_dir):
        # we can raw parse simple lists
        path = local_android_dir / "de_wordlist.combined.gz"
        wl = AndroidWordList('file:////%s' % path)
        lines = wl.parse_lines()
        assert [x for x in lines] == [
            {
                'dictionary': 'main:de',
                'locale': 'de',
                'description': 'Deutsch',
                'date': '1414726263',
                'version': '54',
                'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'},
            {
                'word': 'der', 'f': '216', 'flags': '',
                'originalFreq': '216'},
            {
                'word': 'und', 'f': '213', 'flags': '',
                'originalFreq': '213'},
            ]
    def test_parse_lines_ignores_empty_lines(self, tmpdir):
        # empty lines in wordlist files are ignored by the parser
        path = tmpdir / 'sample_empty_lines.gz'
        with gzip.open(str(path), 'wb') as f:
            f.write(b'\n\n\n')
        wl = AndroidWordList('file:////%s' % path)
        lines = wl.parse_lines()
        assert list(lines) == []
    def test_get_words(self, dictfile_android_short_de):
        # we can get plain wordlists from Android lists
        wl = AndroidWordList("file:////%s" % str(dictfile_android_short_de))
        assert [x for x in wl.get_words()] == ["der", "und"]
    def test_get_words_offensive(self, dictfile_android_short_en):
        # we can filter out offensive words
        wl = AndroidWordList("file:////%s" % str(dictfile_android_short_en))
        list1 = list(wl.get_words(offensive=False))
        assert "hardcore" not in list1
        assert "the" in list1
        list2 = list(wl.get_words(offensive=True))
        assert "hardcore" in list2
        assert "the" not in list2
        list3 = list(wl.get_words(offensive=None))
        assert "hardcore" in list3
        assert "the" in list3
@pytest.mark.skipif(ggsource_unreachable(), reason="no network available")
def test_get_valid_lang_codes(self):
# we can get a list of available language codes.
wl = AndroidWordList()
result = wl.get_valid_lang_codes()
assert result[0:3] == ['cs', 'da', 'de']
def test_get_valid_lang_codes_local(self, local_index):
# get valid lang codes from local copy of index list.
wl = AndroidWordList()
result = wl.get_valid_lang_codes()
assert result == [
'cs', 'da', 'de', 'el', 'en', 'en_GB', 'en_US', 'es',
'fi', 'fr', 'hr', 'it', 'iw', 'lt', 'lv', 'nb', 'nl', 'pl',
'pt_BR', 'pt_PT', 'ro', 'ru', 'sl', 'sr', 'sv', 'tr']<|fim▁end|> | |
<|file_name|>editorsettingscomments.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//
// copyright : (C) 2008 by Eran Ifrah
// file name : editorsettingscomments.cpp
//
// -------------------------------------------------------------------------
// A
// _____ _ _ _ _
// / __ \ | | | | (_) |
// | / \/ ___ __| | ___| | _| |_ ___
// | | / _ \ / _ |/ _ \ | | | __/ _ )
// | \__/\ (_) | (_| | __/ |___| | || __/
// \____/\___/ \__,_|\___\_____/_|\__\___|
<|fim▁hole|>// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
#include "editorsettingscomments.h"
#include "commentconfigdata.h"
// Build the "Comments" preferences page and populate its controls from the
// persisted editor configuration.
EditorSettingsComments::EditorSettingsComments( wxWindow* parent )
    : EditorSettingsCommentsBase( parent )
    , TreeBookNode<EditorSettingsComments>()
{
    CommentConfigData data;
    EditorConfigST::Get()->ReadObject(wxT("CommentConfigData"), &data);
    m_checkBoxContCComment->SetValue( data.GetAddStarOnCComment() );
    m_checkBoxContinueCppComment->SetValue( data.GetContinueCppComment() );
    m_checkBoxSmartAddFiles->SetValue( EditorConfigST::Get()->GetOptions()->GetOptions() & OptionsConfig::Opt_SmartAddFiles );
    size_t flags = EditorConfigST::Get()->GetOptions()->GetOptions();
    // If neither navigation modifier key is configured, fall back to
    // Alt+Ctrl so that quick code navigation keeps working.
    if ( !(flags & (OptionsConfig::Opt_NavKey_Alt|OptionsConfig::Opt_NavKey_Control)) ) {
        flags = OptionsConfig::Opt_NavKey_Alt|OptionsConfig::Opt_NavKey_Control; // force the least-instrusive meta key default
    }
    m_checkBoxAlt->SetValue( flags & OptionsConfig::Opt_NavKey_Alt );
    m_checkBoxCtrl->SetValue( flags & OptionsConfig::Opt_NavKey_Control );
}
void EditorSettingsComments::Save(OptionsConfigPtr options)
{
CommentConfigData data;
EditorConfigST::Get()->ReadObject(wxT("CommentConfigData"), &data);
data.SetAddStarOnCComment(m_checkBoxContCComment->IsChecked());
data.SetContinueCppComment(m_checkBoxContinueCppComment->IsChecked());
EditorConfigST::Get()->WriteObject(wxT("CommentConfigData"), &data);
size_t flags = options->GetOptions();
if( m_checkBoxSmartAddFiles->IsChecked() )
flags |= OptionsConfig::Opt_SmartAddFiles;
else
flags &= ~OptionsConfig::Opt_SmartAddFiles;
// clear the navigation key code
flags &= ~(OptionsConfig::Opt_NavKey_Alt|OptionsConfig::Opt_NavKey_Control|OptionsConfig::Opt_NavKey_Shift);
if( m_checkBoxCtrl->IsChecked() )
flags |= OptionsConfig::Opt_NavKey_Control;
if( m_checkBoxAlt->IsChecked() )
flags |= OptionsConfig::Opt_NavKey_Alt;
if ( !(flags & (OptionsConfig::Opt_NavKey_Alt|OptionsConfig::Opt_NavKey_Control)) ) {
flags |= OptionsConfig::Opt_NavKey_Alt|OptionsConfig::Opt_NavKey_Control; // force the least-instrusive meta key default
}
options->SetOptions(flags);
}<|fim▁end|> | //
// F i l e
//
|
<|file_name|>game.py<|end_file_name|><|fim▁begin|># Copyright 2010, 2014 Gerardo Marset <[email protected]>
#
# This file is part of Haxxor Engine.
#
# Haxxor Engine is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Haxxor Engine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Haxxor Engine. If not, see <http://www.gnu.org/licenses/>.
import os
import time
import json
import tools
from filesystem import File
import cli
import system
import missions
SAVEGAME = "{}.sav"
DOWNLOADS_DIR = "C:\\Descargas"
class Game(object):
def __init__(self):
self.running = True
print("Bienvenido a Haxxor Engine.")
try:
self.name = "test" if tools.DEBUG else ask_for_name()
except EOFError:
self.running = False<|fim▁hole|> return
self.clear()
if os.path.isfile(SAVEGAME.format(self.name)):
self.load()
print("Juego cargado.")
else:
self.aliases = default_aliases()
self.mission_id = 0
self.system = system.default_local_system()
self.save()
print("Una nueva partida fue creada para {}.".format(self.name))
print("Escribí \"help\" para ver la lista de comandos.")
self.start_mission()
self.cli = cli.CLI(self.system, self)
    @property
    def valid_hosts(self):
        """Hosts the player may connect to: loopback aliases, the local
        system's IP and the current mission's target system IP."""
        return ["127.0.0.1", "localhost", self.system.ip,
                self.mission.system.ip]
    def start_mission(self, restart=None):
        """Instantiate the current mission (optionally re-preparing the
        given one) and announce the briefing e-mail to the player."""
        self.mission = (missions.missions[self.mission_id].
                        get_prepared_copy(self, restart))
        print("Tenés un e-mail. Escribí \"mail\" para verlo.")
    def main_loop(self):
        """Main prompt loop.

        While connected to a remote system, the wall-clock time spent on
        each prompt is fed to the mission's IP tracker; once the player's
        IP is traced the connection is dropped as a forced failure.
        """
        while self.running:
            ms = time.time()
            self.cli.prompt()
            if not self.cli.system.is_local:
                # Seconds elapsed during the last prompt drive the trace.
                if self.mission.ip_tracker.update(time.time() - ms,
                                                  self.system.ip):
                    self.telnet_end("Conexión perdida.\nTu IP fue rastreada.",
                                    True)
def load(self):
with open(SAVEGAME.format(self.name), "r") as f:
load_dict = json.loads(f.read())
self.aliases = load_dict["aliases"]
self.mission_id = load_dict["mission_id"]
filesystem = load_dict["filesystem"]
ip = load_dict["ip"]
def recursive_loop(directory):
for name, value in directory.items():
if isinstance(value, dict):
for element in recursive_loop(value):
pass
else:
directory[name] = File(value)
yield name
for element in recursive_loop(filesystem):
pass
self.system = system.System(filesystem, ip, True)
    def save(self):
        """Serialize aliases, mission progress, filesystem and IP to the
        player's savegame as JSON.

        ``File`` objects are reduced to their ``id_`` via the ``default``
        serialization hook.
        """
        with open(SAVEGAME.format(self.name), "w") as f:
            f.write(json.dumps({
                "aliases": self.aliases,
                "mission_id": self.mission_id,
                "filesystem": self.system.filesystem,
                "ip": self.system.ip
            }, indent=4, default=lambda o: o.id_))
    def clear(self):
        """Clear the terminal portably; print newlines as a last resort."""
        if os.name == "posix":
            os.system("clear")
        elif os.name in ("nt", "dos", "ce"):
            os.system("cls")
        else:
            print("\n" * 300)
    def telnet_start(self):
        """Point the CLI at the mission's remote system and show its
        banner; close the session immediately when the login fails."""
        self.cli.system = self.mission.system
        self.clear()
        print(self.mission.asciiart)
        if not self.cli.telnet_login():
            self.telnet_end()
    def telnet_end(self, message="Conexión cerrada.", force_fail=False):
        """Return the CLI to the local system and settle the mission.

        On failure (forced, or mission objectives unmet) the same mission
        restarts; on success the mission's reward files are placed into
        the downloads directory and the next mission begins.
        """
        self.cli.system = self.system
        self.clear()
        print(message)
        if force_fail or not self.mission.is_complete():
            print("Misión fallida.")
            self.start_mission(self.mission)
            return
        print("Misión superada.")
        downloads_dir = self.system.retrieve(tools.
                                             dir_to_dirlist(DOWNLOADS_DIR))
        for file_name, file_ in self.mission.downloads:
            downloads_dir[file_name] = file_
        self.mission_id += 1
        self.start_mission()
def default_aliases():
    """Return the command aliases a fresh game starts with.

    Maps unix-flavoured commands (and one common typo) onto the DOS-style
    commands understood by the in-game shell. A new dict is returned on
    every call so callers may mutate it freely.
    """
    aliases = {}
    aliases["cd.."] = "cd .."  # common typo for "cd .."
    aliases["ls"] = "dir"
    aliases["rm"] = "del"
    aliases["clear"] = "cls"
    return aliases
def ask_for_name():
while True:
name = tools.iinput("¿Cuál es tu nombre? ")
if name == "":
print("Escribí tu nombre.")
continue
if not all(ord(c) < 128 for c in name):
print("Solo se permiten caracteres ASCII.")
continue
if not name.isalnum():
print("Solo se permiten caracteres alfanuméricos.")
continue
break
return name<|fim▁end|> | |
<|file_name|>deployment.go<|end_file_name|><|fim▁begin|>package deploy
import (
"github.com/drone/drone/plugin/condition"
"github.com/drone/drone/shared/build/buildfile"
"github.com/drone/drone/shared/build/repo"
"github.com/drone/drone/plugin/deploy/deis"
"github.com/drone/drone/plugin/deploy/git"
"github.com/drone/drone/plugin/deploy/heroku"
"github.com/drone/drone/plugin/deploy/marathon"
"github.com/drone/drone/plugin/deploy/modulus"
"github.com/drone/drone/plugin/deploy/nodejitsu"
"github.com/drone/drone/plugin/deploy/tsuru"
)
// Deploy stores the per-provider configuration details for deploying
// build artifacts when a Build has succeeded. Each field maps to one
// section of the yaml "deploy" block; a nil field means that provider
// is not configured.
type Deploy struct {
	CloudFoundry *CloudFoundry        `yaml:"cloudfoundry,omitempty"`
	Git          *git.Git             `yaml:"git,omitempty"`
	Heroku       *heroku.Heroku       `yaml:"heroku,omitempty"`
	Deis         *deis.Deis           `yaml:"deis,omitempty"`
	Modulus      *modulus.Modulus     `yaml:"modulus,omitempty"`
	Nodejitsu    *nodejitsu.Nodejitsu `yaml:"nodejitsu,omitempty"`
	SSH          *SSH                 `yaml:"ssh,omitempty"`
	Tsuru        *tsuru.Tsuru         `yaml:"tsuru,omitempty"`
	Bash         *Bash                `yaml:"bash,omitempty"`
	Marathon     *marathon.Marathon   `yaml:"marathon,omitempty"`
}
func (d *Deploy) Write(f *buildfile.Buildfile, r *repo.Repo) {
if d.CloudFoundry != nil && match(d.CloudFoundry.GetCondition(), r) {
d.CloudFoundry.Write(f)
}
if d.Git != nil && match(d.Git.GetCondition(), r) {
d.Git.Write(f)
}
if d.Heroku != nil && match(d.Heroku.GetCondition(), r) {
d.Heroku.Write(f)
}
if d.Deis != nil && match(d.Deis.GetCondition(), r) {
d.Deis.Write(f)
}
if d.Modulus != nil && match(d.Modulus.GetCondition(), r) {
d.Modulus.Write(f)
}
if d.Nodejitsu != nil && match(d.Nodejitsu.GetCondition(), r) {
d.Nodejitsu.Write(f)
}
if d.SSH != nil && match(d.SSH.GetCondition(), r) {
d.SSH.Write(f)
}
if d.Tsuru != nil && match(d.Tsuru.GetCondition(), r) {<|fim▁hole|> }
if d.Bash != nil && match(d.Bash.GetCondition(), r) {
d.Bash.Write(f)
}
if d.Marathon != nil && match(d.Marathon.GetCondition(), r) {
d.Marathon.Write(f)
}
}
func match(c *condition.Condition, r *repo.Repo) bool {
switch {
case c == nil:
return true
case !c.MatchBranch(r.Branch):
return false
case !c.MatchOwner(r.Name):
return false
case !c.MatchPullRequest(r.PR):
return false
}
return true
}<|fim▁end|> | d.Tsuru.Write(f) |
<|file_name|>ogr_gpsbabel.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
###############################################################################
# $Id: ogr_gpsbabel.py 33793 2016-03-26 13:02:07Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read functionality for OGR GPSBabel driver.
# Author: Even Rouault <even dot rouault at mines dash paris dot org>
#
###############################################################################
# Copyright (c) 2010, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import ogr
from osgeo import gdal
###############################################################################
# Check that dependencies are met
def ogr_gpsbabel_init():
    """Probe for the gpsbabel utility and for GPX read support.

    Sets ``ogrtest.have_gpsbabel`` / ``ogrtest.have_read_gpsbabel`` for
    the following tests and returns 'skip' when gpsbabel is unavailable.
    """
    # Test if the gpsbabel is accessible
    ogrtest.have_gpsbabel = False
    ogrtest.have_read_gpsbabel = False
    try:
        ret = gdaltest.runexternal('gpsbabel -V')
    except Exception:
        # A bare ``except:`` would also swallow KeyboardInterrupt and
        # SystemExit; only ordinary errors mean "gpsbabel missing".
        ret = ''
    if 'GPSBabel' not in ret:
        print('Cannot access GPSBabel utility')
        return 'skip'
    try:
        ds = ogr.Open( 'data/test.gpx' )
    except Exception:
        ds = None
    if ds is None:
        print('GPX driver not configured for read support')
    else:
        ogrtest.have_read_gpsbabel = True
    ogrtest.have_gpsbabel = True
    return 'success'
###############################################################################
# Test reading with explicit subdriver
def ogr_gpsbabel_1():
    """Open a NMEA file with an explicit GPSBabel subdriver prefix and
    expect the two standard layers."""
    if not ogrtest.have_read_gpsbabel:
        return 'skip'
    ds = ogr.Open('GPSBabel:nmea:data/nmea.txt')
    if ds is None:
        return 'fail'
    if ds.GetLayerCount() != 2:
        return 'fail'
    return 'success'
###############################################################################
# Test reading with implicit subdriver
def ogr_gpsbabel_2():
    """Open a NMEA file letting the driver detect the subdriver
    implicitly from the file content."""
    if not ogrtest.have_read_gpsbabel:
        return 'skip'
    ds = ogr.Open('data/nmea.txt')
    if ds is None:
        return 'fail'
    if ds.GetLayerCount() != 2:
        return 'fail'
    return 'success'
###############################################################################
# Test writing
def ogr_gpsbabel_3():
    """Write one track point through the GPSBabel driver and check that
    the produced NMEA output contains the expected sentence types."""
    if not ogrtest.have_gpsbabel:
        return 'skip'
    ds = ogr.GetDriverByName('GPSBabel').CreateDataSource('GPSBabel:nmea:tmp/nmea.txt')
    lyr = ds.CreateLayer('track_points', geom_type = ogr.wkbPoint)
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetField('track_fid', 0)
    feat.SetField('track_seg_id', 0)
    feat.SetField('track_name', 'TRACK_NAME')
    feat.SetField('name', 'PT_NAME')
    feat.SetField('hdop', 123)
    feat.SetField('vdop', 456)
    feat.SetField('pdop', 789)
    feat.SetField('sat', 6)
    feat.SetField('time', '2010/06/03 12:34:56')
    feat.SetField('fix', '3d')
    geom = ogr.CreateGeometryFromWkt('POINT(2.50 49.25)')
    feat.SetGeometry(geom)
    lyr.CreateFeature(feat)
    # Release OGR objects so the datasource is flushed to disk.
    feat = None
    lyr = None
    ds = None
    # Context manager guarantees the file handle is closed even on error
    # (the original open/read/close sequence leaked on exception).
    with open('tmp/nmea.txt', 'rt') as f:
        res = f.read()
    gdal.Unlink('tmp/nmea.txt')
    if ('$GPRMC' not in res or '$GPGGA' not in res or
            '$GPGSA' not in res):
        gdaltest.post_reason('did not get expected result')
        print(res)
        return 'fail'
    return 'success'
# Registry of the tests executed by the gdaltest framework, in run order.
gdaltest_list = [
    ogr_gpsbabel_init,
    ogr_gpsbabel_1,
    ogr_gpsbabel_2,
    ogr_gpsbabel_3 ]
if __name__ == '__main__':
gdaltest.setup_run( 'ogr_gpsbabel' )
<|fim▁hole|><|fim▁end|> | gdaltest.run_tests( gdaltest_list )
gdaltest.summarize() |
<|file_name|>Thread.java<|end_file_name|><|fim▁begin|>package com.sissi.protocol.message;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;
import com.sissi.io.read.Metadata;
/**
* @author kim 2014年1月28日
*/
@Metadata(uri = Message.XMLNS, localName = Thread.NAME)
@XmlRootElement
public class Thread {
public final static String NAME = "thread";
private String text;
private String parent;
public Thread() {
super();
}
public Thread(String text) {
super();
this.text = text;<|fim▁hole|> super();
this.text = text;
this.parent = parent;
}
	/** @return the thread identifier text (serialized as the XML element body). */
	@XmlValue
	public String getText() {
		return this.text;
	}
	/** Sets the thread identifier text; returns {@code this} for chaining. */
	public Thread setText(String text) {
		this.text = text;
		return this;
	}
	/** @return the parent thread id (the "parent" XML attribute); may be null. */
	@XmlAttribute
	public String getParent() {
		return this.parent;
	}
	/** Sets the parent thread id; returns {@code this} for chaining. */
	public Thread setParent(String parent) {
		this.parent = parent;
		return this;
	}
	/** @return true when a non-empty thread identifier text is present. */
	public boolean content() {
		return this.text != null && this.text.length() > 0;
	}
}<|fim▁end|> | }
public Thread(String text, String parent) { |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from attachments.admin import AttachmentInlines
from tasks.models import Task
class TaskOptions(admin.ModelAdmin):
inlines = [AttachmentInlines]<|fim▁hole|>admin.site.register(Task, TaskOptions)<|fim▁end|> | |
<|file_name|>machine-operator-reducer-unittest.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/typer.h"
#include "test/unittests/compiler/graph-unittest.h"
#include "test/unittests/compiler/node-test-utils.h"
#include "testing/gmock-support.h"
using testing::AllOf;
using testing::BitEq;
using testing::Capture;
using testing::CaptureEq;
namespace v8 {
namespace internal {
namespace compiler {
// Test fixture that runs the MachineOperatorReducer over individual nodes
// built on a typed test graph.
class MachineOperatorReducerTest : public TypedGraphTest {
 public:
  explicit MachineOperatorReducerTest(int num_parameters = 2)
      : TypedGraphTest(num_parameters), machine_(zone()) {}

 protected:
  // Runs a single reduction step on |node| using a throw-away JSGraph.
  Reduction Reduce(Node* node) {
    JSOperatorBuilder javascript(zone());
    JSGraph jsgraph(isolate(), graph(), common(), &javascript, &machine_);
    MachineOperatorReducer reducer(&jsgraph);
    return reducer.Reduce(node);
  }

  // Builds a matcher for the node pattern the reducer emits for truncating
  // signed division by the constant |divisor|: magic-number high multiply,
  // optional add/sub correction when multiplier and divisor signs differ,
  // optional arithmetic shift, plus the final sign fix-up via (x >>> 31).
  Matcher<Node*> IsTruncatingDiv(const Matcher<Node*>& dividend_matcher,
                                 const int32_t divisor) {
    base::MagicNumbersForDivision<uint32_t> const mag =
        base::SignedDivisionByConstant(bit_cast<uint32_t>(divisor));
    int32_t const multiplier = bit_cast<int32_t>(mag.multiplier);
    int32_t const shift = bit_cast<int32_t>(mag.shift);
    Matcher<Node*> quotient_matcher =
        IsInt32MulHigh(dividend_matcher, IsInt32Constant(multiplier));
    if (divisor > 0 && multiplier < 0) {
      quotient_matcher = IsInt32Add(quotient_matcher, dividend_matcher);
    } else if (divisor < 0 && multiplier > 0) {
      quotient_matcher = IsInt32Sub(quotient_matcher, dividend_matcher);
    }
    if (shift) {
      quotient_matcher = IsWord32Sar(quotient_matcher, IsInt32Constant(shift));
    }
    return IsInt32Add(quotient_matcher,
                      IsWord32Shr(dividend_matcher, IsInt32Constant(31)));
  }

  MachineOperatorBuilder* machine() { return &machine_; }

 private:
  MachineOperatorBuilder machine_;
};
// Variant of the fixture for value-parameterized tests; T is the parameter
// type exposed through ::testing::WithParamInterface.
template <typename T>
class MachineOperatorReducerTestWithParam
    : public MachineOperatorReducerTest,
      public ::testing::WithParamInterface<T> {
 public:
  explicit MachineOperatorReducerTestWithParam(int num_parameters = 2)
      : MachineOperatorReducerTest(num_parameters) {}
  ~MachineOperatorReducerTestWithParam() OVERRIDE {}
};
namespace {
const float kFloat32Values[] = {
-std::numeric_limits<float>::infinity(), -2.70497e+38f, -1.4698e+37f,
-1.22813e+35f, -1.20555e+35f, -1.34584e+34f,
-1.0079e+32f, -6.49364e+26f, -3.06077e+25f,
-1.46821e+25f, -1.17658e+23f, -1.9617e+22f,
-2.7357e+20f, -1.48708e+13f, -1.89633e+12f,
-4.66622e+11f, -2.22581e+11f, -1.45381e+10f,
-1.3956e+09f, -1.32951e+09f, -1.30721e+09f,
-1.19756e+09f, -9.26822e+08f, -6.35647e+08f,
-4.00037e+08f, -1.81227e+08f, -5.09256e+07f,
-964300.0f, -192446.0f, -28455.0f,
-27194.0f, -26401.0f, -20575.0f,
-17069.0f, -9167.0f, -960.178f,
-113.0f, -62.0f, -15.0f,
-7.0f, -0.0256635f, -4.60374e-07f,
-3.63759e-10f, -4.30175e-14f, -5.27385e-15f,
-1.48084e-15f, -1.05755e-19f, -3.2995e-21f,
-1.67354e-23f, -1.11885e-23f, -1.78506e-30f,
-5.07594e-31f, -3.65799e-31f, -1.43718e-34f,
-1.27126e-38f, -0.0f, 0.0f,
1.17549e-38f, 1.56657e-37f, 4.08512e-29f,
3.31357e-28f, 6.25073e-22f, 4.1723e-13f,
1.44343e-09f, 5.27004e-08f, 9.48298e-08f,
5.57888e-07f, 4.89988e-05f, 0.244326f,
12.4895f, 19.0f, 47.0f,
106.0f, 538.324f, 564.536f,
819.124f, 7048.0f, 12611.0f,
19878.0f, 20309.0f, 797056.0f,
1.77219e+09f, 1.51116e+11f, 4.18193e+13f,
3.59167e+16f, 3.38211e+19f, 2.67488e+20f,
1.78831e+21f, 9.20914e+21f, 8.35654e+23f,
1.4495e+24f, 5.94015e+25f, 4.43608e+30f,
2.44502e+33f, 2.61152e+33f, 1.38178e+37f,
1.71306e+37f, 3.31899e+38f, 3.40282e+38f,
std::numeric_limits<float>::infinity()};
// Representative float64 inputs: +/-infinity, +/-0, sub-normal-adjacent
// values, exact small integers, and magnitudes up to the double maximum.
const double kFloat64Values[] = {
    -V8_INFINITY, -4.23878e+275, -5.82632e+265, -6.60355e+220, -6.26172e+212,
    -2.56222e+211, -4.82408e+201, -1.84106e+157, -1.63662e+127, -1.55772e+100,
    -1.67813e+72, -2.3382e+55, -3.179e+30, -1.441e+09, -1.0647e+09,
    -7.99361e+08, -5.77375e+08, -2.20984e+08, -32757, -13171,
    -9970, -3984, -107, -105, -92,
    -77, -61, -0.000208163, -1.86685e-06, -1.17296e-10,
    -9.26358e-11, -5.08004e-60, -1.74753e-65, -1.06561e-71, -5.67879e-79,
    -5.78459e-130, -2.90989e-171, -7.15489e-243, -3.76242e-252, -1.05639e-263,
    -4.40497e-267, -2.19666e-273, -4.9998e-276, -5.59821e-278, -2.03855e-282,
    -5.99335e-283, -7.17554e-284, -3.11744e-309, -0.0, 0.0,
    2.22507e-308, 1.30127e-270, 7.62898e-260, 4.00313e-249, 3.16829e-233,
    1.85244e-228, 2.03544e-129, 1.35126e-110, 1.01182e-106, 5.26333e-94,
    1.35292e-90, 2.85394e-83, 1.78323e-77, 5.4967e-57, 1.03207e-25,
    4.57401e-25, 1.58738e-05, 2, 125, 2310,
    9636, 14802, 17168, 28945, 29305,
    4.81336e+07, 1.41207e+08, 4.65962e+08, 1.40499e+09, 2.12648e+09,
    8.80006e+30, 1.4446e+45, 1.12164e+54, 2.48188e+89, 6.71121e+102,
    3.074e+112, 4.9699e+152, 5.58383e+166, 4.30654e+172, 7.08824e+185,
    9.6586e+214, 2.028e+223, 6.63277e+243, 1.56192e+261, 1.23202e+269,
    5.72883e+289, 8.5798e+290, 1.40256e+294, 1.79769e+308, V8_INFINITY};
// Representative int32 inputs including INT32_MIN/MAX, 0, and values of
// varying sign and magnitude; used as constant-folding corpora below.
const int32_t kInt32Values[] = {
    std::numeric_limits<int32_t>::min(), -1914954528, -1698749618,
    -1578693386, -1577976073, -1573998034,
    -1529085059, -1499540537, -1299205097,
    -1090814845, -938186388, -806828902,
    -750927650, -520676892, -513661538,
    -453036354, -433622833, -282638793,
    -28375, -27788, -22770,
    -18806, -14173, -11956,
    -11200, -10212, -8160,
    -3751, -2758, -1522,
    -121, -120, -118,
    -117, -106, -84,
    -80, -74, -59,
    -52, -48, -39,
    -35, -17, -11,
    -10, -9, -7,
    -5, 0, 9,
    12, 17, 23,
    29, 31, 33,
    35, 40, 47,
    55, 56, 62,
    64, 67, 68,
    69, 74, 79,
    84, 89, 90,
    97, 104, 118,
    124, 126, 127,
    7278, 17787, 24136,
    24202, 25570, 26680,
    30242, 32399, 420886487,
    642166225, 821912648, 822577803,
    851385718, 1212241078, 1411419304,
    1589626102, 1596437184, 1876245816,
    1954730266, 2008792749, 2045320228,
    std::numeric_limits<int32_t>::max()};
// Representative int64 inputs including INT64_MIN/MAX and 0; V8_INT64_C
// guarantees 64-bit literals on all toolchains.
const int64_t kInt64Values[] = {
    std::numeric_limits<int64_t>::min(), V8_INT64_C(-8974392461363618006),
    V8_INT64_C(-8874367046689588135), V8_INT64_C(-8269197512118230839),
    V8_INT64_C(-8146091527100606733), V8_INT64_C(-7550917981466150848),
    V8_INT64_C(-7216590251577894337), V8_INT64_C(-6464086891160048440),
    V8_INT64_C(-6365616494908257190), V8_INT64_C(-6305630541365849726),
    V8_INT64_C(-5982222642272245453), V8_INT64_C(-5510103099058504169),
    V8_INT64_C(-5496838675802432701), V8_INT64_C(-4047626578868642657),
    V8_INT64_C(-4033755046900164544), V8_INT64_C(-3554299241457877041),
    V8_INT64_C(-2482258764588614470), V8_INT64_C(-1688515425526875335),
    V8_INT64_C(-924784137176548532), V8_INT64_C(-725316567157391307),
    V8_INT64_C(-439022654781092241), V8_INT64_C(-105545757668917080),
    V8_INT64_C(-2088319373), V8_INT64_C(-2073699916),
    V8_INT64_C(-1844949911), V8_INT64_C(-1831090548),
    V8_INT64_C(-1756711933), V8_INT64_C(-1559409497),
    V8_INT64_C(-1281179700), V8_INT64_C(-1211513985),
    V8_INT64_C(-1182371520), V8_INT64_C(-785934753),
    V8_INT64_C(-767480697), V8_INT64_C(-705745662),
    V8_INT64_C(-514362436), V8_INT64_C(-459916580),
    V8_INT64_C(-312328082), V8_INT64_C(-302949707),
    V8_INT64_C(-285499304), V8_INT64_C(-125701262),
    V8_INT64_C(-95139843), V8_INT64_C(-32768),
    V8_INT64_C(-27542), V8_INT64_C(-23600),
    V8_INT64_C(-18582), V8_INT64_C(-17770),
    V8_INT64_C(-9086), V8_INT64_C(-9010),
    V8_INT64_C(-8244), V8_INT64_C(-2890),
    V8_INT64_C(-103), V8_INT64_C(-34),
    V8_INT64_C(-27), V8_INT64_C(-25),
    V8_INT64_C(-9), V8_INT64_C(-7),
    V8_INT64_C(0), V8_INT64_C(2),
    V8_INT64_C(38), V8_INT64_C(58),
    V8_INT64_C(65), V8_INT64_C(93),
    V8_INT64_C(111), V8_INT64_C(1003),
    V8_INT64_C(1267), V8_INT64_C(12797),
    V8_INT64_C(23122), V8_INT64_C(28200),
    V8_INT64_C(30888), V8_INT64_C(42648848),
    V8_INT64_C(116836693), V8_INT64_C(263003643),
    V8_INT64_C(571039860), V8_INT64_C(1079398689),
    V8_INT64_C(1145196402), V8_INT64_C(1184846321),
    V8_INT64_C(1758281648), V8_INT64_C(1859991374),
    V8_INT64_C(1960251588), V8_INT64_C(2042443199),
    V8_INT64_C(296220586027987448), V8_INT64_C(1015494173071134726),
    V8_INT64_C(1151237951914455318), V8_INT64_C(1331941174616854174),
    V8_INT64_C(2022020418667972654), V8_INT64_C(2450251424374977035),
    V8_INT64_C(3668393562685561486), V8_INT64_C(4858229301215502171),
    V8_INT64_C(4919426235170669383), V8_INT64_C(5034286595330341762),
    V8_INT64_C(5055797915536941182), V8_INT64_C(6072389716149252074),
    V8_INT64_C(6185309910199801210), V8_INT64_C(6297328311011094138),
    V8_INT64_C(6932372858072165827), V8_INT64_C(8483640924987737210),
    V8_INT64_C(8663764179455849203), V8_INT64_C(8877197042645298254),
    V8_INT64_C(8901543506779157333), std::numeric_limits<int64_t>::max()};
// Representative uint32 inputs: 0, 1, all-ones, random patterns, and
// single-bit and low-bit masks.
const uint32_t kUint32Values[] = {
    0x00000000, 0x00000001, 0xffffffff, 0x1b09788b, 0x04c5fce8, 0xcc0de5bf,
    0x273a798e, 0x187937a3, 0xece3af83, 0x5495a16b, 0x0b668ecc, 0x11223344,
    0x0000009e, 0x00000043, 0x0000af73, 0x0000116b, 0x00658ecc, 0x002b3b4c,
    0x88776655, 0x70000000, 0x07200000, 0x7fffffff, 0x56123761, 0x7fffff00,
    0x761c4761, 0x80000000, 0x88888888, 0xa0000000, 0xdddddddd, 0xe0000000,
    0xeeeeeeee, 0xfffffffd, 0xf0000000, 0x007fffff, 0x003fffff, 0x001fffff,
    0x000fffff, 0x0007ffff, 0x0003ffff, 0x0001ffff, 0x0000ffff, 0x00007fff,
    0x00003fff, 0x00001fff, 0x00000fff, 0x000007ff, 0x000003ff, 0x000001ff};
// Pairs a MachineOperatorBuilder comparison-operator factory with its
// printable name, so failing parameterized tests identify the operator.
struct ComparisonBinaryOperator {
  const Operator* (MachineOperatorBuilder::*constructor)();
  const char* constructor_name;
};
// Pretty-printer used by gtest/gmock in test failure output.
std::ostream& operator<<(std::ostream& os,
                         ComparisonBinaryOperator const& cbop) {
  return os << cbop.constructor_name;
}
// One entry per machine comparison operator, generated from the master
// MACHINE_COMPARE_BINOP_LIST so new operators are covered automatically.
const ComparisonBinaryOperator kComparisonBinaryOperators[] = {
#define OPCODE(Opcode) \
  { &MachineOperatorBuilder::Opcode, #Opcode } \
  ,
    MACHINE_COMPARE_BINOP_LIST(OPCODE)
#undef OPCODE
};
} // namespace
// -----------------------------------------------------------------------------
// Unary operators
namespace {
// Pairs a unary machine-operator factory with its printable name.
struct UnaryOperator {
  const Operator* (MachineOperatorBuilder::*constructor)();
  const char* constructor_name;
};
// Pretty-printer used by gtest/gmock in parameterized test reports.
std::ostream& operator<<(std::ostream& os, const UnaryOperator& unop) {
  return os << unop.constructor_name;
}
// Unary conversion/truncation operators exercised by the parameterized
// unary-operator test.
static const UnaryOperator kUnaryOperators[] = {
    {&MachineOperatorBuilder::ChangeInt32ToFloat64, "ChangeInt32ToFloat64"},
    {&MachineOperatorBuilder::ChangeUint32ToFloat64, "ChangeUint32ToFloat64"},
    {&MachineOperatorBuilder::ChangeFloat64ToInt32, "ChangeFloat64ToInt32"},
    {&MachineOperatorBuilder::ChangeFloat64ToUint32, "ChangeFloat64ToUint32"},
    {&MachineOperatorBuilder::ChangeInt32ToInt64, "ChangeInt32ToInt64"},
    {&MachineOperatorBuilder::ChangeUint32ToUint64, "ChangeUint32ToUint64"},
    {&MachineOperatorBuilder::TruncateFloat64ToInt32, "TruncateFloat64ToInt32"},
    {&MachineOperatorBuilder::TruncateInt64ToInt32, "TruncateInt64ToInt32"}};
} // namespace
// Parameterized over all unary operators above: applying a unary op to a
// bare Parameter must not be reduced (nothing is known about the input).
typedef MachineOperatorReducerTestWithParam<UnaryOperator>
    MachineUnaryOperatorReducerTest;
TEST_P(MachineUnaryOperatorReducerTest, Parameter) {
  const UnaryOperator unop = GetParam();
  Reduction reduction =
      Reduce(graph()->NewNode((machine()->*unop.constructor)(), Parameter(0)));
  EXPECT_FALSE(reduction.Changed());
}
INSTANTIATE_TEST_CASE_P(MachineOperatorReducerTest,
                        MachineUnaryOperatorReducerTest,
                        ::testing::ValuesIn(kUnaryOperators));
// -----------------------------------------------------------------------------
// ChangeFloat32ToFloat64
// Folding: ChangeFloat32ToFloat64 of a float32 constant reduces to the
// bit-equal float64 constant.
// NOTE(review): this test was previously named
// ChangeFloat64ToFloat32WithConstant, but it exercises the
// ChangeFloat32ToFloat64 operator; renamed to match the behavior it pins.
TEST_F(MachineOperatorReducerTest, ChangeFloat32ToFloat64WithConstant) {
  TRACED_FOREACH(float, x, kFloat32Values) {
    Reduction reduction = Reduce(graph()->NewNode(
        machine()->ChangeFloat32ToFloat64(), Float32Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsFloat64Constant(BitEq<double>(x)));
  }
}
// -----------------------------------------------------------------------------
// ChangeFloat64ToInt32
// ChangeFloat64ToInt32(ChangeInt32ToFloat64(x)) is the identity on x.
TEST_F(MachineOperatorReducerTest,
       ChangeFloat64ToInt32WithChangeInt32ToFloat64) {
  Node* input = Parameter(0);
  Node* widened =
      graph()->NewNode(machine()->ChangeInt32ToFloat64(), input);
  Reduction r =
      Reduce(graph()->NewNode(machine()->ChangeFloat64ToInt32(), widened));
  ASSERT_TRUE(r.Changed());
  EXPECT_EQ(input, r.replacement());
}
// Folding: ChangeFloat64ToInt32 of a float64 constant holding an exact int32
// value reduces to that int32 constant.
TEST_F(MachineOperatorReducerTest, ChangeFloat64ToInt32WithConstant) {
  TRACED_FOREACH(int32_t, x, kInt32Values) {
    Reduction reduction = Reduce(graph()->NewNode(
        machine()->ChangeFloat64ToInt32(), Float64Constant(FastI2D(x))));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsInt32Constant(x));
  }
}
// -----------------------------------------------------------------------------
// ChangeFloat64ToUint32
// ChangeFloat64ToUint32(ChangeUint32ToFloat64(x)) is the identity on x.
TEST_F(MachineOperatorReducerTest,
       ChangeFloat64ToUint32WithChangeUint32ToFloat64) {
  Node* value = Parameter(0);
  Reduction reduction = Reduce(graph()->NewNode(
      machine()->ChangeFloat64ToUint32(),
      graph()->NewNode(machine()->ChangeUint32ToFloat64(), value)));
  ASSERT_TRUE(reduction.Changed());
  EXPECT_EQ(value, reduction.replacement());
}
// Folding: ChangeFloat64ToUint32 of a constant; the result is expressed as a
// bit-cast int32 constant since the graph has no unsigned constant kind.
TEST_F(MachineOperatorReducerTest, ChangeFloat64ToUint32WithConstant) {
  TRACED_FOREACH(uint32_t, x, kUint32Values) {
    Reduction reduction = Reduce(graph()->NewNode(
        machine()->ChangeFloat64ToUint32(), Float64Constant(FastUI2D(x))));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsInt32Constant(bit_cast<int32_t>(x)));
  }
}
// -----------------------------------------------------------------------------
// ChangeInt32ToFloat64
// Folding: ChangeInt32ToFloat64 of an int32 constant yields the float64
// constant produced by FastI2D, compared bit-exactly.
TEST_F(MachineOperatorReducerTest, ChangeInt32ToFloat64WithConstant) {
  TRACED_FOREACH(int32_t, x, kInt32Values) {
    Reduction reduction = Reduce(
        graph()->NewNode(machine()->ChangeInt32ToFloat64(), Int32Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsFloat64Constant(BitEq(FastI2D(x))));
  }
}
// -----------------------------------------------------------------------------
// ChangeInt32ToInt64
// Folding: ChangeInt32ToInt64 of an int32 constant sign-extends to int64.
TEST_F(MachineOperatorReducerTest, ChangeInt32ToInt64WithConstant) {
  TRACED_FOREACH(int32_t, x, kInt32Values) {
    Reduction reduction = Reduce(
        graph()->NewNode(machine()->ChangeInt32ToInt64(), Int32Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsInt64Constant(x));
  }
}
// -----------------------------------------------------------------------------
// ChangeUint32ToFloat64
// Folding: ChangeUint32ToFloat64 of a constant (supplied as a bit-cast
// int32 constant) yields the float64 from FastUI2D, compared bit-exactly.
TEST_F(MachineOperatorReducerTest, ChangeUint32ToFloat64WithConstant) {
  TRACED_FOREACH(uint32_t, x, kUint32Values) {
    Reduction reduction =
        Reduce(graph()->NewNode(machine()->ChangeUint32ToFloat64(),
                                Int32Constant(bit_cast<int32_t>(x))));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsFloat64Constant(BitEq(FastUI2D(x))));
  }
}
// -----------------------------------------------------------------------------
// ChangeUint32ToUint64
// Folding: ChangeUint32ToUint64 of a constant zero-extends to 64 bits.
TEST_F(MachineOperatorReducerTest, ChangeUint32ToUint64WithConstant) {
  TRACED_FOREACH(uint32_t, x, kUint32Values) {
    Reduction reduction =
        Reduce(graph()->NewNode(machine()->ChangeUint32ToUint64(),
                                Int32Constant(bit_cast<int32_t>(x))));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(),
                IsInt64Constant(bit_cast<int64_t>(static_cast<uint64_t>(x))));
  }
}
// -----------------------------------------------------------------------------
// TruncateFloat64ToFloat32
// TruncateFloat64ToFloat32(ChangeFloat32ToFloat64(x)) is the identity on x.
TEST_F(MachineOperatorReducerTest,
       TruncateFloat64ToFloat32WithChangeFloat32ToFloat64) {
  Node* input = Parameter(0);
  Node* widened =
      graph()->NewNode(machine()->ChangeFloat32ToFloat64(), input);
  Reduction r = Reduce(
      graph()->NewNode(machine()->TruncateFloat64ToFloat32(), widened));
  ASSERT_TRUE(r.Changed());
  EXPECT_EQ(input, r.replacement());
}
// Folding: TruncateFloat64ToFloat32 of a float64 constant rounds via
// DoubleToFloat32, compared bit-exactly.
TEST_F(MachineOperatorReducerTest, TruncateFloat64ToFloat32WithConstant) {
  TRACED_FOREACH(double, x, kFloat64Values) {
    Reduction reduction = Reduce(graph()->NewNode(
        machine()->TruncateFloat64ToFloat32(), Float64Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(),
                IsFloat32Constant(BitEq(DoubleToFloat32(x))));
  }
}
// -----------------------------------------------------------------------------
// TruncateFloat64ToInt32
// TruncateFloat64ToInt32(ChangeInt32ToFloat64(x)) is the identity on x.
TEST_F(MachineOperatorReducerTest,
       TruncateFloat64ToInt32WithChangeInt32ToFloat64) {
  Node* value = Parameter(0);
  Reduction reduction = Reduce(graph()->NewNode(
      machine()->TruncateFloat64ToInt32(),
      graph()->NewNode(machine()->ChangeInt32ToFloat64(), value)));
  ASSERT_TRUE(reduction.Changed());
  EXPECT_EQ(value, reduction.replacement());
}
// Folding: TruncateFloat64ToInt32 of a constant follows DoubleToInt32
// (JavaScript ToInt32, i.e. modulo-2^32) semantics.
TEST_F(MachineOperatorReducerTest, TruncateFloat64ToInt32WithConstant) {
  TRACED_FOREACH(double, x, kFloat64Values) {
    Reduction reduction = Reduce(graph()->NewNode(
        machine()->TruncateFloat64ToInt32(), Float64Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    EXPECT_THAT(reduction.replacement(), IsInt32Constant(DoubleToInt32(x)));
  }
}
// Pushing the truncation into a phi: Truncate(Phi(a, b)) becomes
// Phi(Truncate(a), Truncate(b)) over the same control input.
TEST_F(MachineOperatorReducerTest, TruncateFloat64ToInt32WithPhi) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  Node* const merge = graph()->start();  // any control node works as merge
  Reduction reduction = Reduce(graph()->NewNode(
      machine()->TruncateFloat64ToInt32(),
      graph()->NewNode(common()->Phi(kMachFloat64, 2), p0, p1, merge)));
  ASSERT_TRUE(reduction.Changed());
  EXPECT_THAT(reduction.replacement(),
              IsPhi(kMachInt32, IsTruncateFloat64ToInt32(p0),
                    IsTruncateFloat64ToInt32(p1), merge));
}
// -----------------------------------------------------------------------------
// TruncateInt64ToInt32
// TruncateInt64ToInt32(ChangeInt32ToInt64(x)) is the identity on x.
TEST_F(MachineOperatorReducerTest, TruncateInt64ToInt32WithChangeInt32ToInt64) {
  Node* value = Parameter(0);
  Reduction reduction = Reduce(graph()->NewNode(
      machine()->TruncateInt64ToInt32(),
      graph()->NewNode(machine()->ChangeInt32ToInt64(), value)));
  ASSERT_TRUE(reduction.Changed());
  EXPECT_EQ(value, reduction.replacement());
}
// Folding: TruncateInt64ToInt32 of a constant keeps the low 32 bits.
TEST_F(MachineOperatorReducerTest, TruncateInt64ToInt32WithConstant) {
  TRACED_FOREACH(int64_t, x, kInt64Values) {
    Reduction reduction = Reduce(
        graph()->NewNode(machine()->TruncateInt64ToInt32(), Int64Constant(x)));
    ASSERT_TRUE(reduction.Changed());
    // Bit casts avoid implementation-defined narrowing of negative values.
    EXPECT_THAT(reduction.replacement(),
                IsInt32Constant(bit_cast<int32_t>(
                    static_cast<uint32_t>(bit_cast<uint64_t>(x)))));
  }
}
// -----------------------------------------------------------------------------
// Word32And
// Masking a left shift with a coarser-or-equal mask is a no-op: for K <= L,
// (x << L) & (-1 << K) => x << L, in either operand order.
TEST_F(MachineOperatorReducerTest, Word32AndWithWord32ShlWithConstant) {
  Node* const p0 = Parameter(0);
  TRACED_FORRANGE(int32_t, l, 1, 31) {
    TRACED_FORRANGE(int32_t, k, 1, l) {
      // (x << L) & (-1 << K) => x << L
      Reduction const r1 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Word32Shl(), p0, Int32Constant(l)),
          Int32Constant(-1 << k)));
      ASSERT_TRUE(r1.Changed());
      EXPECT_THAT(r1.replacement(), IsWord32Shl(p0, IsInt32Constant(l)));
      // (-1 << K) & (x << L) => x << L
      Reduction const r2 = Reduce(graph()->NewNode(
          machine()->Word32And(), Int32Constant(-1 << k),
          graph()->NewNode(machine()->Word32Shl(), p0, Int32Constant(l))));
      ASSERT_TRUE(r2.Changed());
      EXPECT_THAT(r2.replacement(), IsWord32Shl(p0, IsInt32Constant(l)));
    }
  }
}
// Combining nested masks: (x & K) & L => x & (K & L); collapses to constant
// 0 when the combined mask is empty.  K, L in {0, -1} are skipped because
// those trigger other, simpler reductions.
TEST_F(MachineOperatorReducerTest, Word32AndWithWord32AndWithConstant) {
  Node* const p0 = Parameter(0);
  TRACED_FOREACH(int32_t, k, kInt32Values) {
    TRACED_FOREACH(int32_t, l, kInt32Values) {
      if (k == 0 || k == -1 || l == 0 || l == -1) continue;
      // (x & K) & L => x & (K & L)
      Reduction const r1 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Word32And(), p0, Int32Constant(k)),
          Int32Constant(l)));
      ASSERT_TRUE(r1.Changed());
      EXPECT_THAT(r1.replacement(),
                  (k & l) ? IsWord32And(p0, IsInt32Constant(k & l))
                          : IsInt32Constant(0));
      // (K & x) & L => x & (K & L)
      Reduction const r2 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Word32And(), Int32Constant(k), p0),
          Int32Constant(l)));
      ASSERT_TRUE(r2.Changed());
      EXPECT_THAT(r2.replacement(),
                  (k & l) ? IsWord32And(p0, IsInt32Constant(k & l))
                          : IsInt32Constant(0));
    }
  }
}
// Distributing a low-bits-clearing mask over an addition: when one addend is
// already a multiple of 1 << L (a constant K << L, or any value shifted left
// by L), only the other addend needs masking.
TEST_F(MachineOperatorReducerTest, Word32AndWithInt32AddAndConstant) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  TRACED_FORRANGE(int32_t, l, 1, 31) {
    TRACED_FOREACH(int32_t, k, kInt32Values) {
      if ((k << l) == 0) continue;
      // (x + (K << L)) & (-1 << L) => (x & (-1 << L)) + (K << L)
      Reduction const r = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Int32Add(), p0, Int32Constant(k << l)),
          Int32Constant(-1 << l)));
      ASSERT_TRUE(r.Changed());
      EXPECT_THAT(r.replacement(),
                  IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)),
                             IsInt32Constant(k << l)));
    }
    Node* s1 = graph()->NewNode(machine()->Word32Shl(), p1, Int32Constant(l));
    // (y << L + x) & (-1 << L) => (x & (-1 << L)) + y << L
    Reduction const r1 = Reduce(graph()->NewNode(
        machine()->Word32And(), graph()->NewNode(machine()->Int32Add(), s1, p0),
        Int32Constant(-1 << l)));
    ASSERT_TRUE(r1.Changed());
    EXPECT_THAT(r1.replacement(),
                IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)), s1));
    // (x + y << L) & (-1 << L) => (x & (-1 << L)) + y << L
    Reduction const r2 = Reduce(graph()->NewNode(
        machine()->Word32And(), graph()->NewNode(machine()->Int32Add(), p0, s1),
        Int32Constant(-1 << l)));
    ASSERT_TRUE(r2.Changed());
    EXPECT_THAT(r2.replacement(),
                IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)), s1));
  }
}
// Masking a multiplication by a multiple of 1 << L with -1 << L is a no-op,
// since the product already has L low zero bits (either operand order).
TEST_F(MachineOperatorReducerTest, Word32AndWithInt32MulAndConstant) {
  Node* const p0 = Parameter(0);
  TRACED_FORRANGE(int32_t, l, 1, 31) {
    TRACED_FOREACH(int32_t, k, kInt32Values) {
      if ((k << l) == 0) continue;
      // (x * (K << L)) & (-1 << L) => x * (K << L)
      Reduction const r1 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Int32Mul(), p0, Int32Constant(k << l)),
          Int32Constant(-1 << l)));
      ASSERT_TRUE(r1.Changed());
      EXPECT_THAT(r1.replacement(), IsInt32Mul(p0, IsInt32Constant(k << l)));
      // ((K << L) * x) & (-1 << L) => x * (K << L)
      Reduction const r2 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Int32Mul(), Int32Constant(k << l), p0),
          Int32Constant(-1 << l)));
      ASSERT_TRUE(r2.Changed());
      EXPECT_THAT(r2.replacement(), IsInt32Mul(p0, IsInt32Constant(k << l)));
    }
  }
}
// Combination of the previous two reductions: masking an addition where one
// addend is a multiplication by a multiple of 1 << L only needs to mask the
// other addend.
TEST_F(MachineOperatorReducerTest,
       Word32AndWithInt32AddAndInt32MulAndConstant) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  TRACED_FORRANGE(int32_t, l, 1, 31) {
    TRACED_FOREACH(int32_t, k, kInt32Values) {
      if ((k << l) == 0) continue;
      // (y * (K << L) + x) & (-1 << L) => (x & (-1 << L)) + y * (K << L)
      Reduction const r1 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Int32Add(),
                           graph()->NewNode(machine()->Int32Mul(), p1,
                                            Int32Constant(k << l)),
                           p0),
          Int32Constant(-1 << l)));
      ASSERT_TRUE(r1.Changed());
      EXPECT_THAT(r1.replacement(),
                  IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)),
                             IsInt32Mul(p1, IsInt32Constant(k << l))));
      // (x + y * (K << L)) & (-1 << L) => (x & (-1 << L)) + y * (K << L)
      Reduction const r2 = Reduce(graph()->NewNode(
          machine()->Word32And(),
          graph()->NewNode(machine()->Int32Add(), p0,
                           graph()->NewNode(machine()->Int32Mul(), p1,
                                            Int32Constant(k << l))),
          Int32Constant(-1 << l)));
      ASSERT_TRUE(r2.Changed());
      EXPECT_THAT(r2.replacement(),
                  IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)),
                             IsInt32Mul(p1, IsInt32Constant(k << l))));
    }
  }
}
// Comparisons produce 0 or 1, so masking with 1 is a no-op: cmp & 1 => cmp
// (either operand order), for every machine comparison operator.
TEST_F(MachineOperatorReducerTest, Word32AndWithComparisonAndConstantOne) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  TRACED_FOREACH(ComparisonBinaryOperator, cbop, kComparisonBinaryOperators) {
    Node* cmp = graph()->NewNode((machine()->*cbop.constructor)(), p0, p1);
    // cmp & 1 => cmp
    Reduction const r1 =
        Reduce(graph()->NewNode(machine()->Word32And(), cmp, Int32Constant(1)));
    ASSERT_TRUE(r1.Changed());
    EXPECT_EQ(cmp, r1.replacement());
    // 1 & cmp => cmp
    Reduction const r2 =
        Reduce(graph()->NewNode(machine()->Word32And(), Int32Constant(1), cmp));
    ASSERT_TRUE(r2.Changed());
    EXPECT_EQ(cmp, r2.replacement());
  }
}
// -----------------------------------------------------------------------------
// Word32Xor
// Double bitwise negation cancels: (x ^ -1) ^ -1 => x, in all four operand
// arrangements of the -1 constants.
TEST_F(MachineOperatorReducerTest, Word32XorWithWord32XorAndMinusOne) {
  Node* const p0 = Parameter(0);
  // (x ^ -1) ^ -1 => x
  Reduction r1 = Reduce(graph()->NewNode(
      machine()->Word32Xor(),
      graph()->NewNode(machine()->Word32Xor(), p0, Int32Constant(-1)),
      Int32Constant(-1)));
  ASSERT_TRUE(r1.Changed());
  EXPECT_EQ(r1.replacement(), p0);
  // -1 ^ (x ^ -1) => x
  Reduction r2 = Reduce(graph()->NewNode(
      machine()->Word32Xor(), Int32Constant(-1),
      graph()->NewNode(machine()->Word32Xor(), p0, Int32Constant(-1))));
  ASSERT_TRUE(r2.Changed());
  EXPECT_EQ(r2.replacement(), p0);
  // (-1 ^ x) ^ -1 => x
  Reduction r3 = Reduce(graph()->NewNode(
      machine()->Word32Xor(),
      graph()->NewNode(machine()->Word32Xor(), Int32Constant(-1), p0),
      Int32Constant(-1)));
  ASSERT_TRUE(r3.Changed());
  EXPECT_EQ(r3.replacement(), p0);
  // -1 ^ (-1 ^ x) => x
  Reduction r4 = Reduce(graph()->NewNode(
      machine()->Word32Xor(), Int32Constant(-1),
      graph()->NewNode(machine()->Word32Xor(), Int32Constant(-1), p0)));
  ASSERT_TRUE(r4.Changed());
  EXPECT_EQ(r4.replacement(), p0);
}
// -----------------------------------------------------------------------------
// Word32Ror
// Recognizing rotates built from shifts with a variable shift amount:
// (x << y) | (x >>> (32 - y)) and the mirrored/right-rotate forms all
// reduce in place to a single Word32Ror node.
TEST_F(MachineOperatorReducerTest, ReduceToWord32RorWithParameters) {
  Node* value = Parameter(0);
  Node* shift = Parameter(1);
  Node* sub = graph()->NewNode(machine()->Int32Sub(), Int32Constant(32), shift);
  // Testing rotate left.
  Node* shl_l = graph()->NewNode(machine()->Word32Shl(), value, shift);
  Node* shr_l = graph()->NewNode(machine()->Word32Shr(), value, sub);
  // (x << y) | (x >>> (32 - y)) => x ror (32 - y)
  Node* node1 = graph()->NewNode(machine()->Word32Or(), shl_l, shr_l);
  Reduction reduction1 = Reduce(node1);
  EXPECT_TRUE(reduction1.Changed());
  EXPECT_EQ(reduction1.replacement(), node1);  // reduced in place
  EXPECT_THAT(reduction1.replacement(), IsWord32Ror(value, sub));
  // (x >>> (32 - y)) | (x << y) => x ror (32 - y)
  Node* node2 = graph()->NewNode(machine()->Word32Or(), shr_l, shl_l);
  Reduction reduction2 = Reduce(node2);
  EXPECT_TRUE(reduction2.Changed());
  EXPECT_EQ(reduction2.replacement(), node2);
  EXPECT_THAT(reduction2.replacement(), IsWord32Ror(value, sub));
  // Testing rotate right.
  Node* shl_r = graph()->NewNode(machine()->Word32Shl(), value, sub);
  Node* shr_r = graph()->NewNode(machine()->Word32Shr(), value, shift);
  // (x << (32 - y)) | (x >>> y) => x ror y
  Node* node3 = graph()->NewNode(machine()->Word32Or(), shl_r, shr_r);
  Reduction reduction3 = Reduce(node3);
  EXPECT_TRUE(reduction3.Changed());
  EXPECT_EQ(reduction3.replacement(), node3);
  EXPECT_THAT(reduction3.replacement(), IsWord32Ror(value, shift));
  // (x >>> y) | (x << (32 - y)) => x ror y
  Node* node4 = graph()->NewNode(machine()->Word32Or(), shr_r, shl_r);
  Reduction reduction4 = Reduce(node4);
  EXPECT_TRUE(reduction4.Changed());
  EXPECT_EQ(reduction4.replacement(), node4);
  EXPECT_THAT(reduction4.replacement(), IsWord32Ror(value, shift));
}
// Recognizing rotates with a constant shift amount:
// (x << K) | (x >>> (32 - K)) => x ror (32 - K), in either operand order.
TEST_F(MachineOperatorReducerTest, ReduceToWord32RorWithConstant) {
  Node* value = Parameter(0);
  TRACED_FORRANGE(int32_t, k, 0, 31) {
    // NOTE(review): the initializer of |shl| and the definition of |shr|
    // were missing (truncated source line); reconstructed from the operand
    // usage and the IsWord32Ror(value, 32 - k) expectations below.
    Node* shl =
        graph()->NewNode(machine()->Word32Shl(), value, Int32Constant(k));
    Node* shr =
        graph()->NewNode(machine()->Word32Shr(), value, Int32Constant(32 - k));
    // (x << K) | (x >>> (32 - K)) => x ror (32 - K)
    Node* node1 = graph()->NewNode(machine()->Word32Or(), shl, shr);
    Reduction reduction1 = Reduce(node1);
    EXPECT_TRUE(reduction1.Changed());
    EXPECT_EQ(reduction1.replacement(), node1);  // reduced in place
    EXPECT_THAT(reduction1.replacement(),
                IsWord32Ror(value, IsInt32Constant(32 - k)));
    // (x >>> (32 - K)) | (x << K) => x ror (32 - K)
    Node* node2 = graph()->NewNode(machine()->Word32Or(), shr, shl);
    Reduction reduction2 = Reduce(node2);
    EXPECT_TRUE(reduction2.Changed());
    EXPECT_EQ(reduction2.replacement(), node2);
    EXPECT_THAT(reduction2.replacement(),
                IsWord32Ror(value, IsInt32Constant(32 - k)));
  }
}
// Rotating by zero is a no-op: x ror 0 => x.
TEST_F(MachineOperatorReducerTest, Word32RorWithZeroShift) {
  Node* input = Parameter(0);
  Node* ror =
      graph()->NewNode(machine()->Word32Ror(), input, Int32Constant(0));
  Reduction r = Reduce(ror);
  EXPECT_TRUE(r.Changed());
  EXPECT_EQ(r.replacement(), input);
}
// Folding: Word32Ror of two constants evaluates via RotateRight32.
// NOTE(review): the loop variable is int32_t while iterating kUint32Values,
// so values above INT32_MAX are reinterpreted via implicit conversion.
TEST_F(MachineOperatorReducerTest, Word32RorWithConstants) {
  TRACED_FOREACH(int32_t, x, kUint32Values) {
    TRACED_FORRANGE(int32_t, y, 0, 31) {
      Node* node = graph()->NewNode(machine()->Word32Ror(), Int32Constant(x),
                                    Int32Constant(y));
      Reduction reduction = Reduce(node);
      EXPECT_TRUE(reduction.Changed());
      EXPECT_THAT(reduction.replacement(),
                  IsInt32Constant(base::bits::RotateRight32(x, y)));
    }
  }
}
// -----------------------------------------------------------------------------
// Word32Sar
// cmp << 31 >> 31 smears the comparison bit across the word, which for a
// 0/1-valued comparison equals 0 - cmp.
TEST_F(MachineOperatorReducerTest, Word32SarWithWord32ShlAndComparison) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  TRACED_FOREACH(ComparisonBinaryOperator, cbop, kComparisonBinaryOperators) {
    Node* cmp = graph()->NewNode((machine()->*cbop.constructor)(), p0, p1);
    // cmp << 31 >> 31 => 0 - cmp
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Word32Sar(),
        graph()->NewNode(machine()->Word32Shl(), cmp, Int32Constant(31)),
        Int32Constant(31)));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(), IsInt32Sub(IsInt32Constant(0), cmp));
  }
}
// Sign-extending an already sign-extended load is a no-op:
// (load:int8 << 24) >> 24 => load, and (load:int16 << 16) >> 16 => load.
TEST_F(MachineOperatorReducerTest, Word32SarWithWord32ShlAndLoad) {
  Node* const p0 = Parameter(0);
  Node* const p1 = Parameter(1);
  {
    Node* const l = graph()->NewNode(machine()->Load(kMachInt8), p0, p1,
                                     graph()->start(), graph()->start());
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Word32Sar(),
        graph()->NewNode(machine()->Word32Shl(), l, Int32Constant(24)),
        Int32Constant(24)));
    ASSERT_TRUE(r.Changed());
    EXPECT_EQ(l, r.replacement());
  }
  {
    Node* const l = graph()->NewNode(machine()->Load(kMachInt16), p0, p1,
                                     graph()->start(), graph()->start());
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Word32Sar(),
        graph()->NewNode(machine()->Word32Shl(), l, Int32Constant(16)),
        Int32Constant(16)));
    ASSERT_TRUE(r.Changed());
    EXPECT_EQ(l, r.replacement());
  }
}
// -----------------------------------------------------------------------------
// Word32Shl
// Shifting left by zero is a no-op: x << 0 => x.
TEST_F(MachineOperatorReducerTest, Word32ShlWithZeroShift) {
  Node* input = Parameter(0);
  Reduction r = Reduce(
      graph()->NewNode(machine()->Word32Shl(), input, Int32Constant(0)));
  ASSERT_TRUE(r.Changed());
  EXPECT_EQ(input, r.replacement());
}
// (x >> K) << K clears the low K bits, so it reduces to a mask:
// x & ~((1 << K) - 1).
TEST_F(MachineOperatorReducerTest, Word32ShlWithWord32Sar) {
  Node* p0 = Parameter(0);
  TRACED_FORRANGE(int32_t, x, 1, 31) {
    Node* node = graph()->NewNode(
        machine()->Word32Shl(),
        graph()->NewNode(machine()->Word32Sar(), p0, Int32Constant(x)),
        Int32Constant(x));
    Reduction r = Reduce(node);
    ASSERT_TRUE(r.Changed());
    int32_t m = bit_cast<int32_t>(~((1U << x) - 1U));  // mask with x low zeros
    EXPECT_THAT(r.replacement(), IsWord32And(p0, IsInt32Constant(m)));
  }
}
// ((x + (K << L)) >> L) << L aligns the sum down to a multiple of 1 << L;
// the constant addend is already aligned, so only x needs masking.
TEST_F(MachineOperatorReducerTest,
       Word32ShlWithWord32SarAndInt32AddAndConstant) {
  Node* const p0 = Parameter(0);
  TRACED_FOREACH(int32_t, k, kInt32Values) {
    TRACED_FORRANGE(int32_t, l, 1, 31) {
      if ((k << l) == 0) continue;
      // (x + (K << L)) >> L << L => (x & (-1 << L)) + (K << L)
      Reduction const r = Reduce(graph()->NewNode(
          machine()->Word32Shl(),
          graph()->NewNode(machine()->Word32Sar(),
                           graph()->NewNode(machine()->Int32Add(), p0,
                                            Int32Constant(k << l)),
                           Int32Constant(l)),
          Int32Constant(l)));
      ASSERT_TRUE(r.Changed());
      EXPECT_THAT(r.replacement(),
                  IsInt32Add(IsWord32And(p0, IsInt32Constant(-1 << l)),
                             IsInt32Constant(k << l)));
    }
  }
}
// (x >>> K) << K also clears the low K bits: same mask reduction as the
// arithmetic-shift case above.
TEST_F(MachineOperatorReducerTest, Word32ShlWithWord32Shr) {
  Node* p0 = Parameter(0);
  TRACED_FORRANGE(int32_t, x, 1, 31) {
    Node* node = graph()->NewNode(
        machine()->Word32Shl(),
        graph()->NewNode(machine()->Word32Shr(), p0, Int32Constant(x)),
        Int32Constant(x));
    Reduction r = Reduce(node);
    ASSERT_TRUE(r.Changed());
    int32_t m = bit_cast<int32_t>(~((1U << x) - 1U));
    EXPECT_THAT(r.replacement(), IsWord32And(p0, IsInt32Constant(m)));
  }
}
// -----------------------------------------------------------------------------
// Int32Sub
// Canonicalization: x - K => x + (-K); x - 0 => x.
TEST_F(MachineOperatorReducerTest, Int32SubWithConstant) {
  Node* const p0 = Parameter(0);
  TRACED_FOREACH(int32_t, k, kInt32Values) {
    Reduction const r =
        Reduce(graph()->NewNode(machine()->Int32Sub(), p0, Int32Constant(k)));
    ASSERT_TRUE(r.Changed());
    if (k == 0) {
      EXPECT_EQ(p0, r.replacement());
    } else {
      EXPECT_THAT(r.replacement(), IsInt32Add(p0, IsInt32Constant(-k)));
    }
  }
}
// -----------------------------------------------------------------------------
// Int32Div
// Strength reduction of signed division by a constant:
//   x / 0 => 0, x / 1 => x, x / -1 => 0 - x,
//   x / +-2^K => shift/add sequence rounding towards zero,
//   other divisors => truncating multiply-high sequence (IsTruncatingDiv).
TEST_F(MachineOperatorReducerTest, Int32DivWithConstant) {
  Node* const p0 = Parameter(0);
  {
    // Division by zero yields 0 (machine-level semantics, no trap).
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0, Int32Constant(0), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(), IsInt32Constant(0));
  }
  {
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0, Int32Constant(1), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_EQ(r.replacement(), p0);
  }
  {
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0, Int32Constant(-1), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(), IsInt32Sub(IsInt32Constant(0), p0));
  }
  {
    // x / 2 => (x + (x >>> 31)) >> 1; the shifted-in sign bit corrects the
    // rounding direction for negative x.
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0, Int32Constant(2), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(
        r.replacement(),
        IsWord32Sar(IsInt32Add(IsWord32Shr(p0, IsInt32Constant(31)), p0),
                    IsInt32Constant(1)));
  }
  {
    // x / -2 => 0 - (x / 2).
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0, Int32Constant(-2), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(
        r.replacement(),
        IsInt32Sub(
            IsInt32Constant(0),
            IsWord32Sar(IsInt32Add(IsWord32Shr(p0, IsInt32Constant(31)), p0),
                        IsInt32Constant(1))));
  }
  TRACED_FORRANGE(int32_t, shift, 2, 30) {
    // x / 2^K => (x + correction) >> K, with the correction extracted from
    // the sign extension of x.
    Reduction const r =
        Reduce(graph()->NewNode(machine()->Int32Div(), p0,
                                Int32Constant(1 << shift), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(
        r.replacement(),
        IsWord32Sar(IsInt32Add(IsWord32Shr(IsWord32Sar(p0, IsInt32Constant(31)),
                                           IsInt32Constant(32 - shift)),
                               p0),
                    IsInt32Constant(shift)));
  }
  TRACED_FORRANGE(int32_t, shift, 2, 31) {
    // x / -(2^K) => 0 - (x / 2^K).
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Int32Div(), p0,
        Uint32Constant(bit_cast<uint32_t, int32_t>(-1) << shift),
        graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(
        r.replacement(),
        IsInt32Sub(
            IsInt32Constant(0),
            IsWord32Sar(
                IsInt32Add(IsWord32Shr(IsWord32Sar(p0, IsInt32Constant(31)),
                                       IsInt32Constant(32 - shift)),
                           p0),
                IsInt32Constant(shift))));
  }
  TRACED_FOREACH(int32_t, divisor, kInt32Values) {
    // Non-power-of-two divisors use the truncating multiply-high sequence;
    // negative divisors additionally negate the quotient.
    if (divisor < 0) {
      if (base::bits::IsPowerOfTwo32(-divisor)) continue;
      Reduction const r = Reduce(graph()->NewNode(
          machine()->Int32Div(), p0, Int32Constant(divisor), graph()->start()));
      ASSERT_TRUE(r.Changed());
      EXPECT_THAT(r.replacement(), IsInt32Sub(IsInt32Constant(0),
                                              IsTruncatingDiv(p0, -divisor)));
    } else if (divisor > 0) {
      if (base::bits::IsPowerOfTwo32(divisor)) continue;
      Reduction const r = Reduce(graph()->NewNode(
          machine()->Int32Div(), p0, Int32Constant(divisor), graph()->start()));
      ASSERT_TRUE(r.Changed());
      EXPECT_THAT(r.replacement(), IsTruncatingDiv(p0, divisor));
    }
  }
}
// x / x => 0 == (x == 0), i.e. 1 when x != 0 and 0 when x == 0 (avoids a
// division instruction entirely).
TEST_F(MachineOperatorReducerTest, Int32DivWithParameters) {
  Node* input = Parameter(0);
  Node* div = graph()->NewNode(machine()->Int32Div(), input, input,
                               graph()->start());
  Reduction r = Reduce(div);
  ASSERT_TRUE(r.Changed());
  EXPECT_THAT(r.replacement(),
              IsWord32Equal(IsWord32Equal(input, IsInt32Constant(0)),
                            IsInt32Constant(0)));
}
// -----------------------------------------------------------------------------
// Uint32Div
// Strength reduction of unsigned division by a constant:
//   0 / x => 0, x / 0 => 0, x / 1 => x,
//   constant / constant folds via UnsignedDiv32,
//   x / 2^K => x >>> K.
TEST_F(MachineOperatorReducerTest, Uint32DivWithConstant) {
  Node* const p0 = Parameter(0);
  {
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Uint32Div(), Int32Constant(0), p0, graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(), IsInt32Constant(0));
  }
  {
    // Division by zero yields 0 (machine-level semantics, no trap).
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Uint32Div(), p0, Int32Constant(0), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(), IsInt32Constant(0));
  }
  {
    Reduction const r = Reduce(graph()->NewNode(
        machine()->Uint32Div(), p0, Int32Constant(1), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_EQ(r.replacement(), p0);
  }
  TRACED_FOREACH(uint32_t, dividend, kUint32Values) {
    TRACED_FOREACH(uint32_t, divisor, kUint32Values) {
      Reduction const r = Reduce(
          graph()->NewNode(machine()->Uint32Div(), Uint32Constant(dividend),
                           Uint32Constant(divisor), graph()->start()));
      ASSERT_TRUE(r.Changed());
      EXPECT_THAT(r.replacement(),
                  IsInt32Constant(bit_cast<int32_t>(
                      base::bits::UnsignedDiv32(dividend, divisor))));
    }
  }
  TRACED_FORRANGE(uint32_t, shift, 1, 31) {
    Reduction const r =
        Reduce(graph()->NewNode(machine()->Uint32Div(), p0,
                                Uint32Constant(1u << shift), graph()->start()));
    ASSERT_TRUE(r.Changed());
    EXPECT_THAT(r.replacement(),
                IsWord32Shr(p0, IsInt32Constant(bit_cast<int32_t>(shift))));
  }
}
// x / x => 0 == (x == 0), i.e. 1 when x != 0 and 0 when x == 0.
TEST_F(MachineOperatorReducerTest, Uint32DivWithParameters) {
  Node* input = Parameter(0);
  Node* div = graph()->NewNode(machine()->Uint32Div(), input, input,
                               graph()->start());
  Reduction r = Reduce(div);
  ASSERT_TRUE(r.Changed());
  EXPECT_THAT(r.replacement(),
              IsWord32Equal(IsWord32Equal(input, IsInt32Constant(0)),
                            IsInt32Constant(0)));
}
// -----------------------------------------------------------------------------
// Int32Mod
TEST_F(MachineOperatorReducerTest, Int32ModWithConstant) {
Node* const p0 = Parameter(0);
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), Int32Constant(0), p0, graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), p0, Int32Constant(0), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), p0, Int32Constant(1), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), p0, Int32Constant(-1), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
TRACED_FOREACH(int32_t, dividend, kInt32Values) {
TRACED_FOREACH(int32_t, divisor, kInt32Values) {
Reduction const r = Reduce(
graph()->NewNode(machine()->Int32Mod(), Int32Constant(dividend),
Int32Constant(divisor), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsInt32Constant(base::bits::SignedMod32(dividend, divisor)));
}
}
TRACED_FORRANGE(int32_t, shift, 1, 30) {
Reduction const r =
Reduce(graph()->NewNode(machine()->Int32Mod(), p0,
Int32Constant(1 << shift), graph()->start()));
int32_t const mask = (1 << shift) - 1;
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
r.replacement(),
IsSelect(kMachInt32, IsInt32LessThan(p0, IsInt32Constant(0)),
IsInt32Sub(IsInt32Constant(0),
IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
IsInt32Constant(mask))),
IsWord32And(p0, IsInt32Constant(mask))));
}
TRACED_FORRANGE(int32_t, shift, 1, 31) {
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), p0,
Uint32Constant(bit_cast<uint32_t, int32_t>(-1) << shift),
graph()->start()));
int32_t const mask = bit_cast<int32_t, uint32_t>((1U << shift) - 1);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
r.replacement(),
IsSelect(kMachInt32, IsInt32LessThan(p0, IsInt32Constant(0)),
IsInt32Sub(IsInt32Constant(0),
IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
IsInt32Constant(mask))),
IsWord32And(p0, IsInt32Constant(mask))));
}
TRACED_FOREACH(int32_t, divisor, kInt32Values) {
if (divisor == 0 || base::bits::IsPowerOfTwo32(Abs(divisor))) continue;
Reduction const r = Reduce(graph()->NewNode(
machine()->Int32Mod(), p0, Int32Constant(divisor), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsInt32Sub(p0, IsInt32Mul(IsTruncatingDiv(p0, Abs(divisor)),
IsInt32Constant(Abs(divisor)))));
}
}
TEST_F(MachineOperatorReducerTest, Int32ModWithParameters) {
Node* const p0 = Parameter(0);
Reduction const r =
Reduce(graph()->NewNode(machine()->Int32Mod(), p0, p0, graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
// -----------------------------------------------------------------------------
// Uint32Mod
TEST_F(MachineOperatorReducerTest, Uint32ModWithConstant) {
Node* const p0 = Parameter(0);
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Uint32Mod(), p0, Int32Constant(0), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Uint32Mod(), Int32Constant(0), p0, graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
{
Reduction const r = Reduce(graph()->NewNode(
machine()->Uint32Mod(), p0, Int32Constant(1), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
TRACED_FOREACH(uint32_t, dividend, kUint32Values) {
TRACED_FOREACH(uint32_t, divisor, kUint32Values) {
Reduction const r = Reduce(
graph()->NewNode(machine()->Uint32Mod(), Uint32Constant(dividend),
Uint32Constant(divisor), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsInt32Constant(bit_cast<int32_t>(
base::bits::UnsignedMod32(dividend, divisor))));
}
}
TRACED_FORRANGE(uint32_t, shift, 1, 31) {
Reduction const r =
Reduce(graph()->NewNode(machine()->Uint32Mod(), p0,
Uint32Constant(1u << shift), graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsWord32And(p0, IsInt32Constant(
bit_cast<int32_t>((1u << shift) - 1u))));
}
}
TEST_F(MachineOperatorReducerTest, Uint32ModWithParameters) {
Node* const p0 = Parameter(0);
Reduction const r = Reduce(
graph()->NewNode(machine()->Uint32Mod(), p0, p0, graph()->start()));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
}
// -----------------------------------------------------------------------------
// Int32Add
TEST_F(MachineOperatorReducerTest, Int32AddWithInt32SubWithConstantZero) {
Node* const p0 = Parameter(0);
Node* const p1 = Parameter(1);
Reduction const r1 = Reduce(graph()->NewNode(
machine()->Int32Add(),
graph()->NewNode(machine()->Int32Sub(), Int32Constant(0), p0), p1));
ASSERT_TRUE(r1.Changed());
EXPECT_THAT(r1.replacement(), IsInt32Sub(p1, p0));
Reduction const r2 = Reduce(graph()->NewNode(
machine()->Int32Add(), p0,
graph()->NewNode(machine()->Int32Sub(), Int32Constant(0), p1)));
ASSERT_TRUE(r2.Changed());
EXPECT_THAT(r2.replacement(), IsInt32Sub(p0, p1));
}
// -----------------------------------------------------------------------------
// Int32AddWithOverflow
TEST_F(MachineOperatorReducerTest, Int32AddWithOverflowWithZero) {
Node* p0 = Parameter(0);
{
Node* add = graph()->NewNode(machine()->Int32AddWithOverflow(),
Int32Constant(0), p0);
Reduction r = Reduce(graph()->NewNode(common()->Projection(1), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
r = Reduce(graph()->NewNode(common()->Projection(0), add));
ASSERT_TRUE(r.Changed());
EXPECT_EQ(p0, r.replacement());
}
{
Node* add = graph()->NewNode(machine()->Int32AddWithOverflow(), p0,
Int32Constant(0));
Reduction r = Reduce(graph()->NewNode(common()->Projection(1), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
r = Reduce(graph()->NewNode(common()->Projection(0), add));
ASSERT_TRUE(r.Changed());
EXPECT_EQ(p0, r.replacement());
}
}
TEST_F(MachineOperatorReducerTest, Int32AddWithOverflowWithConstant) {
TRACED_FOREACH(int32_t, x, kInt32Values) {
TRACED_FOREACH(int32_t, y, kInt32Values) {
int32_t z;
Node* add = graph()->NewNode(machine()->Int32AddWithOverflow(),
Int32Constant(x), Int32Constant(y));
Reduction r = Reduce(graph()->NewNode(common()->Projection(1), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsInt32Constant(base::bits::SignedAddOverflow32(x, y, &z)));
r = Reduce(graph()->NewNode(common()->Projection(0), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(z));
}
}
}
// -----------------------------------------------------------------------------
// Int32SubWithOverflow
TEST_F(MachineOperatorReducerTest, Int32SubWithOverflowWithZero) {
Node* p0 = Parameter(0);
Node* add =
graph()->NewNode(machine()->Int32SubWithOverflow(), p0, Int32Constant(0));
Reduction r = Reduce(graph()->NewNode(common()->Projection(1), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(0));
r = Reduce(graph()->NewNode(common()->Projection(0), add));
ASSERT_TRUE(r.Changed());
EXPECT_EQ(p0, r.replacement());
}
TEST_F(MachineOperatorReducerTest, Int32SubWithOverflowWithConstant) {
TRACED_FOREACH(int32_t, x, kInt32Values) {
TRACED_FOREACH(int32_t, y, kInt32Values) {
int32_t z;
Node* add = graph()->NewNode(machine()->Int32SubWithOverflow(),
Int32Constant(x), Int32Constant(y));
Reduction r = Reduce(graph()->NewNode(common()->Projection(1), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsInt32Constant(base::bits::SignedSubOverflow32(x, y, &z)));
r = Reduce(graph()->NewNode(common()->Projection(0), add));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsInt32Constant(z));
}
}
}
// -----------------------------------------------------------------------------
// Uint32LessThan
TEST_F(MachineOperatorReducerTest, Uint32LessThanWithWord32Sar) {
Node* const p0 = Parameter(0);
TRACED_FORRANGE(uint32_t, shift, 1, 3) {
const uint32_t limit = (kMaxInt >> shift) - 1;
Node* const node = graph()->NewNode(
machine()->Uint32LessThan(),
graph()->NewNode(machine()->Word32Sar(), p0, Uint32Constant(shift)),
Uint32Constant(limit));
Reduction r = Reduce(node);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsUint32LessThan(
p0, IsInt32Constant(bit_cast<int32_t>(limit << shift))));
}
}
// -----------------------------------------------------------------------------
// Float64Mul
TEST_F(MachineOperatorReducerTest, Float64MulWithMinusOne) {
Node* const p0 = Parameter(0);
{
Reduction r = Reduce(
graph()->NewNode(machine()->Float64Mul(), p0, Float64Constant(-1.0)));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsFloat64Sub(IsFloat64Constant(BitEq(-0.0)), p0));
}
{
Reduction r = Reduce(
graph()->NewNode(machine()->Float64Mul(), Float64Constant(-1.0), p0));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsFloat64Sub(IsFloat64Constant(BitEq(-0.0)), p0));
}
}
// -----------------------------------------------------------------------------
// Float64InsertLowWord32
TEST_F(MachineOperatorReducerTest, Float64InsertLowWord32WithConstant) {
TRACED_FOREACH(double, x, kFloat64Values) {
TRACED_FOREACH(uint32_t, y, kUint32Values) {
Reduction const r =
Reduce(graph()->NewNode(machine()->Float64InsertLowWord32(),
Float64Constant(x), Uint32Constant(y)));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
r.replacement(),
IsFloat64Constant(BitEq(bit_cast<double>(
(bit_cast<uint64_t>(x) & V8_UINT64_C(0xFFFFFFFF00000000)) | y))));
}
}
}
// -----------------------------------------------------------------------------
// Float64InsertHighWord32
TEST_F(MachineOperatorReducerTest, Float64InsertHighWord32WithConstant) {
TRACED_FOREACH(double, x, kFloat64Values) {
TRACED_FOREACH(uint32_t, y, kUint32Values) {
Reduction const r =
Reduce(graph()->NewNode(machine()->Float64InsertHighWord32(),
Float64Constant(x), Uint32Constant(y)));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsFloat64Constant(BitEq(bit_cast<double>(
(bit_cast<uint64_t>(x) & V8_UINT64_C(0xFFFFFFFF)) |
(static_cast<uint64_t>(y) << 32)))));
}
}
}
// -----------------------------------------------------------------------------
// Store
TEST_F(MachineOperatorReducerTest, StoreRepWord8WithWord32And) {
const StoreRepresentation rep(kRepWord8, kNoWriteBarrier);
Node* const base = Parameter(0);
Node* const index = Parameter(1);
Node* const value = Parameter(2);
Node* const effect = graph()->start();
Node* const control = graph()->start();
TRACED_FOREACH(uint32_t, x, kUint32Values) {
Node* const node =
graph()->NewNode(machine()->Store(rep), base, index,
graph()->NewNode(machine()->Word32And(), value,
Uint32Constant(x | 0xffu)),
effect, control);
Reduction r = Reduce(node);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsStore(rep, base, index, value, effect, control));
}
}
TEST_F(MachineOperatorReducerTest, StoreRepWord8WithWord32SarAndWord32Shl) {
const StoreRepresentation rep(kRepWord8, kNoWriteBarrier);
Node* const base = Parameter(0);
Node* const index = Parameter(1);
Node* const value = Parameter(2);
Node* const effect = graph()->start();
Node* const control = graph()->start();
TRACED_FORRANGE(int32_t, x, 1, 24) {
Node* const node = graph()->NewNode(
machine()->Store(rep), base, index,
graph()->NewNode(
machine()->Word32Sar(),
graph()->NewNode(machine()->Word32Shl(), value, Int32Constant(x)),
Int32Constant(x)),
effect, control);
Reduction r = Reduce(node);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsStore(rep, base, index, value, effect, control));
}
}
TEST_F(MachineOperatorReducerTest, StoreRepWord16WithWord32And) {
const StoreRepresentation rep(kRepWord16, kNoWriteBarrier);
Node* const base = Parameter(0);
Node* const index = Parameter(1);
Node* const value = Parameter(2);
Node* const effect = graph()->start();
Node* const control = graph()->start();
TRACED_FOREACH(uint32_t, x, kUint32Values) {
Node* const node =
graph()->NewNode(machine()->Store(rep), base, index,
graph()->NewNode(machine()->Word32And(), value,
Uint32Constant(x | 0xffffu)),
effect, control);
Reduction r = Reduce(node);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsStore(rep, base, index, value, effect, control));
}
}
TEST_F(MachineOperatorReducerTest, StoreRepWord16WithWord32SarAndWord32Shl) {
const StoreRepresentation rep(kRepWord16, kNoWriteBarrier);
Node* const base = Parameter(0);
Node* const index = Parameter(1);
Node* const value = Parameter(2);
Node* const effect = graph()->start();
Node* const control = graph()->start();
TRACED_FORRANGE(int32_t, x, 1, 16) {
Node* const node = graph()->NewNode(
machine()->Store(rep), base, index,
graph()->NewNode(
machine()->Word32Sar(),
graph()->NewNode(machine()->Word32Shl(), value, Int32Constant(x)),
Int32Constant(x)),
effect, control);
Reduction r = Reduce(node);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsStore(rep, base, index, value, effect, control));
}
}
} // namespace compiler
} // namespace internal
} // namespace v8<|fim▁end|> | graph()->NewNode(machine()->Word32Shl(), value, Int32Constant(k));
Node* shr =
graph()->NewNode(machine()->Word32Shr(), value, Int32Constant(32 - k)); |
<|file_name|>App.js<|end_file_name|><|fim▁begin|>import React from "react"
import Presentation from "./Presentation"
import Icon from 'material-ui/Icon'
import IconButton from 'material-ui/IconButton'
import Grid from 'material-ui/Grid'
import Typography from 'material-ui/Typography'
import { colors } from "../themes/coinium"
require("../themes/coinium/index.css")
const FOOTER_WIDTH = 60
const MODES = {
PRESENTATION: 0,
HELP: 1
}
export default class App extends React.Component {
constructor(props) {
super(props);
this.state = {
mode: MODES.PRESENTATION
};
}
goToSlide(slideName) {
this.setState({mode: MODES.PRESENTATION}, () => {
location.hash = `/${slideName}`
})
}
renderHelp() {
const style = {
height: '100%',
backgroundColor: colors.primary
}
const creditsStyle = {
opacity: 0.8
}
return (
<Grid container direction="column" justify="center" align="center" style={style}>
<Typography type="caption" style={creditsStyle}>
Copyright 2017 Coinium, Inc
<hr />
Contact us: <a href="mailto:[email protected]">[email protected]</a>
<hr />
{"Some icons based on the work of "}
<a href="http://www.freepik.com" title="Freepik">Freepik</a>
{" from "}
<a href="https://www.flaticon.com/" title="Flaticon">www.flaticon.com</a>
{" are licensed by "}
<a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0" target="_blank">CC 3.0 BY</a>
</Typography>
</Grid>
)
}
renderCurrentPage() {
switch (this.state.mode) {
case MODES.PRESENTATION:
return <Presentation /><|fim▁hole|> <Typography>Please reload</Typography>
)
}
}
render() {
const mainStyle = {
position: 'fixed',
top: 0,
right: FOOTER_WIDTH,
bottom: 0,
left: 0,
boxShadow: '2px 0px 4px rgba(0,0,0,0.4)',
zIndex: 2,
overflow: 'hidden'
}
const navStyle = {
background: colors.secondary,
position: 'fixed',
top: 0,
right: 0,
bottom: 0,
left: 'auto',
width: FOOTER_WIDTH,
zIndex: 1
}
const onHelpClick = () => {
const mode = this.state.mode == MODES.HELP
? MODES.PRESENTATION
: MODES.HELP
this.setState({mode})
}
return (
<Grid container className="App">
<Grid item style={mainStyle}>
{this.renderCurrentPage()}
</Grid>
<Grid item container direction="column"
justify="space-between" align="center" spacing={0}
style={navStyle}>
<Grid>
<IconButton onClick={this.goToSlide.bind(this, "home")}>
<Icon color="contrast">home</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "problem")}>
<Icon color="contrast">info_outline</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "team")}>
<Icon color="contrast">people</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "mobile")}>
<Icon color="contrast">phone_iphone</Icon>
</IconButton>
<IconButton onClick={this.goToSlide.bind(this, "signup")}>
<Icon color="contrast">insert_drive_file</Icon>
</IconButton>
</Grid>
<Grid>
<IconButton onClick={onHelpClick}>
<Icon color="contrast">help_outline</Icon>
</IconButton>
</Grid>
</Grid>
</Grid>
);
}
}<|fim▁end|> | case MODES.HELP:
return this.renderHelp()
default:
return ( |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from transmute_core import *
# from .handler import convert_to_handler
# from .route import route
from .route_set import RouteSet<|fim▁hole|>from .swagger import add_swagger<|fim▁end|> | from .url import url_spec |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
################################################################################
# GIPS: Geospatial Image Processing System
#
# AUTHOR: Matthew Hanson
# EMAIL: [email protected]
#
# Copyright (C) 2014-2018 Applied Geosolutions
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
################################################################################
import os
from . import version
def detect_version():
"""Emit GIPS' software version. May be overridden for testing purposes.
To override version.py, put a desired version string in the environment
variable GIPS_OVERRIDE_VERSION."""
return os.environ.get('GIPS_OVERRIDE_VERSION', version.__version__)
__version__ = detect_version()<|fim▁end|> | # (at your option) any later version. |
<|file_name|>connection.rs<|end_file_name|><|fim▁begin|>// Distributed under the OSI-approved BSD 3-Clause License.
// See accompanying LICENSE file for details.
use crates::dbus_bytestream::connection;
use error::*;
use message::{Message, MessageType};
use value::{BasicValue, Value};
bitflags! {
/// Flags for use when requesting a name on the bus from the bus.
pub flags RequestNameFlags: u32 {
/// Allow replacement if another request for the same name later uses the
/// `REPLACE_EXISTING` flag when requesting the same name.
const ALLOW_REPLACEMENT = 0x1,
/// Try and replace the service using the name requested, if one exists. It must have
/// requested the name with `ALLOW_REPLACEMENT` for this to work.
const REPLACE_EXISTING = 0x2,
/// By default, the request for the name will be placed into a queue to wait for the name
/// to become available. Adding this flag will cause the request to fail instead.
const DO_NOT_QUEUE = 0x4,
}
}
#[derive(Debug, PartialEq, Eq)]
/// Replies from the server when requesting a name.
pub enum RequestNameReply {
/// The service has become the primary owner of the name.
PrimaryOwner,
/// The request is in the queue to become the owner of the name.
InQueue,
/// The name is already owned and may not be replaced.
Exists,
/// The application requesting the name already owns the name.
AlreadyOwner,
}
#[derive(Debug, PartialEq, Eq)]
/// Replies from the server when releasing a name.
pub enum ReleaseNameReply {
/// The name has been released.
Released,
/// The name is not bound to any service.
NonExistent,
/// The application releasing the name doesn't own the name.
NotOwner,
}
/// An iterator over messages received from the message bus.
pub struct Messages<'a> {
conn: &'a connection::Connection,
}
/// A connection to a bus.
///
/// A connection is usually to either the system bus or a session bus. User services (e.g.,
/// `SecretService`, notification daemons, etc.) live on the session bus while system services
/// (e.g., `Udisks2`, `NetworkManager`, etc.) live on the system bus.
pub struct Connection {
conn: connection::Connection,
}
impl Connection {
// TODO: Expose other connection methods?
/// Connect to the session bus.
pub fn session_new() -> Result<Self> {
Ok(Connection {
conn: connection::Connection::connect_session()?,
})
}
/// Connect to the system bus.
pub fn system_new() -> Result<Self> {
Ok(Connection {
conn: connection::Connection::connect_system()?,
})
}
/// Request a name on the bus.
///
/// By default, the name to address this connection directly is assigned by the daemon managing
/// the bus, but a name for the application may be requested. Names are, by convention, in a
/// reverse domain name format and use CamelCase for application-level names (e.g.,
/// `com.example.Application`).
pub fn request_name(&self, name: &str, flags: RequestNameFlags)
-> Result<RequestNameReply> {
// TODO: Use an actual struct with an API for this.
let msg = Message::new_method_call("org.freedesktop.DBus",
"/org/freedesktop/DBus",
"org.freedesktop.DBus",
"RequestName")
.add_argument(&name)
.add_argument(&flags.bits);
if let Some(mut results) = self.conn.call_sync(msg.message)? {
if let Some(Value::BasicValue(BasicValue::Uint32(r))) = results.pop() {
match r {
1 => Ok(RequestNameReply::PrimaryOwner),
2 => Ok(RequestNameReply::InQueue),
3 => Ok(RequestNameReply::Exists),
4 => Ok(RequestNameReply::AlreadyOwner),
_ => bail!(ErrorKind::InvalidReply(format!("RequestName: invalid response {}", r))),
}
} else {
bail!(ErrorKind::InvalidReply("RequestName: invalid response".to_string()));
}
} else {
bail!(ErrorKind::InvalidReply("RequestName: no response".to_string()));
}
}
<|fim▁hole|> "/org/freedesktop/DBus",
"org.freedesktop.DBus",
"ReleaseName")
.add_argument(&name);
if let Some(mut results) = self.conn.call_sync(msg.message)? {
if let Some(Value::BasicValue(BasicValue::Uint32(r))) = results.pop() {
match r {
1 => Ok(ReleaseNameReply::Released),
2 => Ok(ReleaseNameReply::NonExistent),
3 => Ok(ReleaseNameReply::NotOwner),
_ => bail!(ErrorKind::InvalidReply(format!("ReleaseName: invalid response {}", r))),
}
} else {
bail!(ErrorKind::InvalidReply("ReleaseName: invalid response".to_string()));
}
} else {
bail!(ErrorKind::InvalidReply("ReleaseName: no response".to_string()));
}
}
/// Requests the server to route messages to this connection.
///
/// By default, the server will not deliver any messages to this connection. In order to
/// receive messages, the manager must be told that the messages are wanted.
///
/// The match syntax is documented in the [D-Bus
/// specification](https://dbus.freedesktop.org/doc/dbus-specification.html#message-bus-routing).
pub fn add_match(&self, match_rule: &str) -> Result<()> {
let msg = Message::new_method_call("org.freedesktop.DBus",
"/org/freedesktop/DBus",
"org.freedesktop.DBus",
"AddMatch")
.add_argument(&match_rule);
self.conn.call_sync(msg.message)?;
Ok(())
}
/// Send a `Message` on the bus.
///
/// On success, returns the serial number of the message.
pub fn send(&self, msg: Message) -> Result<u32> {
Ok(self.conn.send(msg.message)?)
}
/// An iterator over messages received over the bus.
pub fn iter(&self) -> Messages {
Messages {
conn: &self.conn,
}
}
}
fn _should_handle(message: &Message) -> bool {
match message.message_type() {
MessageType::MethodCall | MessageType::Signal => true,
_ => false,
}
}
impl<'a> Iterator for Messages<'a> {
type Item = Message;
/// Returns messages received from the bus.
///
/// Note that this currently blocks. See [this
/// issue](https://github.com/srwalter/dbus-bytestream/issues/10) for progress on supporting an
/// event loop.
fn next(&mut self) -> Option<Self::Item> {
let res = self.conn.read_msg();
match res {
Ok(message) => {
let dbus_message = Message::new(message);
if _should_handle(&dbus_message) {
Some(dbus_message)
} else {
None
}
},
Err(_) => None,
}
}
}<|fim▁end|> | /// Release a name on the bus.
pub fn release_name(&self, name: &str) -> Result<ReleaseNameReply> {
// TODO: Use an actual struct with an API for this.
let msg = Message::new_method_call("org.freedesktop.DBus", |
<|file_name|>from_form_value.rs<|end_file_name|><|fim▁begin|>use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6, SocketAddr};
use std::str::FromStr;
use http::RawStr;
/// Trait to create instance of some type from a form value; expected from field<|fim▁hole|>///
/// When deriving the `FromForm` trait, Rocket uses the `FromFormValue`
/// implementation of each field's type to validate the form input. To
/// illustrate, consider the following structure:
///
/// ```rust,ignore
/// #[derive(FromForm)]
/// struct Person {
/// name: String,
/// age: u16
/// }
/// ```
///
/// The `FromForm` implementation generated by Rocket will call
/// `String::from_form_value` for the `name` field, and `u16::from_form_value`
/// for the `age` field. The `Person` structure can only be created from a form
/// if both calls return successfully.
///
/// ## Catching Validation Errors
///
/// Sometimes you want to be informed of validation errors. When this is
/// desired, types of `Option<T>` or `Result<T, T::Error>` can be used. These
/// types implement `FromFormValue` themselves. Their implementations always
/// return successfully, so their validation never fails. They can be used to
/// determine if the `from_form_value` call failed and to retrieve the error
/// value from the failed call.
///
/// For instance, if we wanted to know if a user entered an invalid `age` in the
/// form corresponding to the `Person` structure above, we could use the
/// following structure:
///
/// ```rust
/// # use rocket::http::RawStr;
/// # #[allow(dead_code)]
/// struct Person<'r> {
/// name: String,
/// age: Result<u16, &'r RawStr>
/// }
/// ```
///
/// The `Err` value in this case is `&RawStr` since `u16::from_form_value`
/// returns a `Result<u16, &RawStr>`.
///
/// # Provided Implementations
///
/// Rocket implements `FromFormValue` for many standard library types. Their
/// behavior is documented here.
///
/// * **f32, f64, isize, i8, i16, i32, i64, usize, u8, u16, u32, u64
/// IpAddr, Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6, SocketAddr**
///
/// A value is validated successfully if the `from_str` method for the given
/// type returns successfully. Otherwise, the raw form value is returned as
/// the `Err` value.
///
/// * **bool**
///
/// A value is validated successfully as `true` if the the form value is
/// `"true"` or `"on"`, and as a `false` value if the form value is
/// `"false"`, `"off"`, or not present. In any other case, the raw form
/// value is returned in the `Err` value.
///
/// * **&[RawStr](/rocket/http/struct.RawStr.html)**
///
/// _This implementation always returns successfully._
///
/// The raw, undecoded string is returned directly without modification.
///
/// * **String**
///
/// URL decodes the form value. If the decode is successful, the decoded
/// string is returned. Otherwise, an `Err` with the original form value is
/// returned.
///
/// * **Option<T>** _where_ **T: FromFormValue**
///
/// _This implementation always returns successfully._
///
/// The form value is validated by `T`'s `FromFormValue` implementation. If
/// the validation succeeds, a `Some(validated_value)` is returned.
/// Otherwise, a `None` is returned.
///
/// * **Result<T, T::Error>** _where_ **T: FromFormValue**
///
/// _This implementation always returns successfully._
///
/// The from value is validated by `T`'s `FromFormvalue` implementation. The
/// returned `Result` value is returned.
///
/// # Example
///
/// This trait is generally implemented to parse and validate form values. While
/// Rocket provides parsing and validation for many of the standard library
/// types such as `u16` and `String`, you can implement `FromFormValue` for a
/// custom type to get custom validation.
///
/// Imagine you'd like to verify that some user is over some age in a form. You
/// might define a new type and implement `FromFormValue` as follows:
///
/// ```rust
/// use rocket::request::FromFormValue;
/// use rocket::http::RawStr;
///
/// struct AdultAge(usize);
///
/// impl<'v> FromFormValue<'v> for AdultAge {
/// type Error = &'v RawStr;
///
/// fn from_form_value(form_value: &'v RawStr) -> Result<AdultAge, &'v RawStr> {
/// match form_value.parse::<usize>() {
/// Ok(age) if age >= 21 => Ok(AdultAge(age)),
/// _ => Err(form_value),
/// }
/// }
/// }
/// ```
///
/// The type can then be used in a `FromForm` struct as follows:
///
/// ```rust,ignore
/// #[derive(FromForm)]
/// struct Person {
/// name: String,
/// age: AdultAge
/// }
/// ```
///
/// A form using the `Person` structure as its target will only parse and
/// validate if the `age` field contains a `usize` greater than `21`.
pub trait FromFormValue<'v>: Sized {
/// The associated error which can be returned from parsing. It is a good
/// idea to have the return type be or contain an `&'v str` so that the
/// unparseable string can be examined after a bad parse.
type Error;
/// Parses an instance of `Self` from an HTTP form field value or returns an
/// `Error` if one cannot be parsed.
fn from_form_value(form_value: &'v RawStr) -> Result<Self, Self::Error>;
/// Returns a default value to be used when the form field does not exist.
/// If this returns `None`, then the field is required. Otherwise, this
/// should return `Some(default_value)`. The default implementation simply
/// returns `None`.
#[inline(always)]
fn default() -> Option<Self> {
None
}
}
impl<'v> FromFormValue<'v> for &'v RawStr {
type Error = !;
// This just gives the raw string.
#[inline(always)]
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
Ok(v)
}
}
impl<'v> FromFormValue<'v> for String {
type Error = &'v RawStr;
// This actually parses the value according to the standard.
#[inline(always)]
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
v.url_decode().map_err(|_| v)
}
}
impl<'v> FromFormValue<'v> for bool {
type Error = &'v RawStr;
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
match v.as_str() {
"on" | "true" => Ok(true),
"off" | "false" => Ok(false),
_ => Err(v),
}
}
#[inline(always)]
fn default() -> Option<bool> {
Some(false)
}
}
macro_rules! impl_with_fromstr {
($($T:ident),+) => ($(
impl<'v> FromFormValue<'v> for $T {
type Error = &'v RawStr;
#[inline(always)]
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
$T::from_str(v.as_str()).map_err(|_| v)
}
}
)+)
}
impl_with_fromstr!(f32, f64, isize, i8, i16, i32, i64, usize, u8, u16, u32, u64,
IpAddr, Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6, SocketAddr);
impl<'v, T: FromFormValue<'v>> FromFormValue<'v> for Option<T> {
type Error = !;
#[inline(always)]
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
match T::from_form_value(v) {
Ok(v) => Ok(Some(v)),
Err(_) => Ok(None),
}
}
#[inline(always)]
fn default() -> Option<Option<T>> {
Some(None)
}
}
// // TODO: Add more useful implementations (range, regex, etc.).
impl<'v, T: FromFormValue<'v>> FromFormValue<'v> for Result<T, T::Error> {
type Error = !;
#[inline(always)]
fn from_form_value(v: &'v RawStr) -> Result<Self, Self::Error> {
match T::from_form_value(v) {
ok@Ok(_) => Ok(ok),
e@Err(_) => Ok(e),
}
}
}<|fim▁end|> | /// types in structs deriving `FromForm`. |
<|file_name|>user_03.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Yue-Wen FANG'
__maintainer__ = "Yue-Wen FANG"
__email__ = '[email protected]'
__license__ = 'Apache License 2.0'
__creation_date__= 'Dec. 28, 2018'
"""
9-3. Users: Make a class called User . Create two attributes called first_name and last_name, and then create
several other attributes that are typically stored in a user profile . Make a method called describe_user()
that prints a summary of the user’s information . Make another method called greet_user() that prints a
personalized greeting to the user .
Create several instances representing different users, and call both methods for each user .t mv dog.py
"""
class User:
"""
a class for User
"""
def __init__(self, first_name, last_name, gender, age, email='f@cn'):
self.name = first_name + last_name
self.gender = gender
self.age = age
self.email = email # if no email is specified, the default will be used
<|fim▁hole|> def describe_use(self):
print('The profile of ' + self.name + ":")
print('Gender: ', self.gender)
print('Age: ', self.age)
print('Email: ', self.email)
Tiantian_Li = User('Tiantian', 'Li', 'Male', '20', email='Li@cn')
Tiantian_Li.describe_use()<|fim▁end|> | |
<|file_name|>101_test.py<|end_file_name|><|fim▁begin|>from fruits import validate_fruit
fruits = ["banana", "lemon", "apple", "orange", "batman"]
print fruits
def list_fruits(fruits, byName=True):
if byName:<|fim▁hole|> # WARNING: this won't make a copy of the list and return it. It will change the list FOREVER
fruits.sort()
for index, fruit in enumerate(fruits):
if validate_fruit(fruit):
print "Fruit nr %d is %s" % (index, fruit)
else:
print "This %s is no fruit!" % (fruit)
list_fruits(fruits)
print fruits<|fim▁end|> | |
<|file_name|>dli_powerswitch_logs.py<|end_file_name|><|fim▁begin|># Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Device logs for DLI powerswitch devices."""
RETURN_CODE = "200\n"
ERROR_RETURN_CODE = "409\n"
DEFAULT_BEHAVIOR = {
"http://123.45.67.89/restapi/config/=version/": {
"text": '["1.7.15.0"]',
"status_code": "207"<|fim▁hole|> },
"http://123.45.67.89/restapi/config/=serial/": {
"text": '["ABCD1234"]',
"status_code": "207"
},
"http://123.45.67.89/restapi/config/=brand_company_name/": {
"text": '["Digital Loggers, Inc."]',
"status_code": "207"
},
"http://123.45.67.89/restapi/config/=brand_name/": {
"text": '["Web Power Switch"]',
"status_code": "207"
},
"http://123.45.67.89/restapi/relay/outlets/=1/state/": {
"text": '["true"]',
"status_code": "207"
},
}<|fim▁end|> | |
<|file_name|>languages.js<|end_file_name|><|fim▁begin|>//
// Touches the DOM.
// This file listens to events from the language selector and changes the
// DOM to have the language requested.
// Uses globals from chal-header.html.
//
// Selecting the current locale
var selector = document.getElementById('lang-select')
// add change listener
selector.addEventListener('change', function (event) {
// Go to page in the locale specified
var location = window.location<|fim▁hole|>})<|fim▁end|> | var url = location.href.replace(/built\/([a-z]{2}-[A-Z]{2})/, 'built/' + selector.value)
location.href = url |
<|file_name|>cc-form.js<|end_file_name|><|fim▁begin|>/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
/* @api */
define([
'underscore',
'Magento_Checkout/js/view/payment/default',
'Magento_Payment/js/model/credit-card-validation/credit-card-data',
'Magento_Payment/js/model/credit-card-validation/credit-card-number-validator',
'mage/translate'
], function (_, Component, creditCardData, cardNumberValidator, $t) {
'use strict';
return Component.extend({
defaults: {
creditCardType: '',
creditCardExpYear: '',
creditCardExpMonth: '',
creditCardNumber: '',
creditCardSsStartMonth: '',
creditCardSsStartYear: '',
creditCardSsIssue: '',
creditCardVerificationNumber: '',
selectedCardType: null
},
/** @inheritdoc */
initObservable: function () {
this._super()
.observe([
'creditCardType',
'creditCardExpYear',
'creditCardExpMonth',
'creditCardNumber',
'creditCardVerificationNumber',
'creditCardSsStartMonth',
'creditCardSsStartYear',
'creditCardSsIssue',
'selectedCardType'
]);
return this;
},
/**
* Init component
*/
initialize: function () {
var self = this;
this._super();
//Set credit card number to credit card data object
this.creditCardNumber.subscribe(function (value) {
var result;
self.selectedCardType(null);
if (value === '' || value === null) {
return false;
}
result = cardNumberValidator(value);
if (!result.isPotentiallyValid && !result.isValid) {
return false;
}
if (result.card !== null) {
self.selectedCardType(result.card.type);
creditCardData.creditCard = result.card;
}
if (result.isValid) {
creditCardData.creditCardNumber = value;
self.creditCardType(result.card.type);
}
});
//Set expiration year to credit card data object
this.creditCardExpYear.subscribe(function (value) {
creditCardData.expirationYear = value;
});
<|fim▁hole|> creditCardData.expirationMonth = value;
});
//Set cvv code to credit card data object
this.creditCardVerificationNumber.subscribe(function (value) {
creditCardData.cvvCode = value;
});
},
/**
* Get code
* @returns {String}
*/
getCode: function () {
return 'cc';
},
/**
* Get data
* @returns {Object}
*/
getData: function () {
return {
'method': this.item.method,
'additional_data': {
'cc_cid': this.creditCardVerificationNumber(),
'cc_ss_start_month': this.creditCardSsStartMonth(),
'cc_ss_start_year': this.creditCardSsStartYear(),
'cc_ss_issue': this.creditCardSsIssue(),
'cc_type': this.creditCardType(),
'cc_exp_year': this.creditCardExpYear(),
'cc_exp_month': this.creditCardExpMonth(),
'cc_number': this.creditCardNumber()
}
};
},
/**
* Get list of available credit card types
* @returns {Object}
*/
getCcAvailableTypes: function () {
return window.checkoutConfig.payment.ccform.availableTypes[this.getCode()];
},
/**
* Get payment icons
* @param {String} type
* @returns {Boolean}
*/
getIcons: function (type) {
return window.checkoutConfig.payment.ccform.icons.hasOwnProperty(type) ?
window.checkoutConfig.payment.ccform.icons[type]
: false;
},
/**
* Get list of months
* @returns {Object}
*/
getCcMonths: function () {
return window.checkoutConfig.payment.ccform.months[this.getCode()];
},
/**
* Get list of years
* @returns {Object}
*/
getCcYears: function () {
return window.checkoutConfig.payment.ccform.years[this.getCode()];
},
/**
* Check if current payment has verification
* @returns {Boolean}
*/
hasVerification: function () {
return window.checkoutConfig.payment.ccform.hasVerification[this.getCode()];
},
/**
* @deprecated
* @returns {Boolean}
*/
hasSsCardType: function () {
return window.checkoutConfig.payment.ccform.hasSsCardType[this.getCode()];
},
/**
* Get image url for CVV
* @returns {String}
*/
getCvvImageUrl: function () {
return window.checkoutConfig.payment.ccform.cvvImageUrl[this.getCode()];
},
/**
* Get image for CVV
* @returns {String}
*/
getCvvImageHtml: function () {
return '<img src="' + this.getCvvImageUrl() +
'" alt="' + $t('Card Verification Number Visual Reference') +
'" title="' + $t('Card Verification Number Visual Reference') +
'" />';
},
/**
* @deprecated
* @returns {Object}
*/
getSsStartYears: function () {
return window.checkoutConfig.payment.ccform.ssStartYears[this.getCode()];
},
/**
* Get list of available credit card types values
* @returns {Object}
*/
getCcAvailableTypesValues: function () {
return _.map(this.getCcAvailableTypes(), function (value, key) {
return {
'value': key,
'type': value
};
});
},
/**
* Get list of available month values
* @returns {Object}
*/
getCcMonthsValues: function () {
return _.map(this.getCcMonths(), function (value, key) {
return {
'value': key,
'month': value
};
});
},
/**
* Get list of available year values
* @returns {Object}
*/
getCcYearsValues: function () {
return _.map(this.getCcYears(), function (value, key) {
return {
'value': key,
'year': value
};
});
},
/**
* @deprecated
* @returns {Object}
*/
getSsStartYearsValues: function () {
return _.map(this.getSsStartYears(), function (value, key) {
return {
'value': key,
'year': value
};
});
},
/**
* Is legend available to display
* @returns {Boolean}
*/
isShowLegend: function () {
return false;
},
/**
* Get available credit card type by code
* @param {String} code
* @returns {String}
*/
getCcTypeTitleByCode: function (code) {
var title = '',
keyValue = 'value',
keyType = 'type';
_.each(this.getCcAvailableTypesValues(), function (value) {
if (value[keyValue] === code) {
title = value[keyType];
}
});
return title;
},
/**
* Prepare credit card number to output
* @param {String} number
* @returns {String}
*/
formatDisplayCcNumber: function (number) {
return 'xxxx-' + number.substr(-4);
},
/**
* Get credit card details
* @returns {Array}
*/
getInfo: function () {
return [
{
'name': 'Credit Card Type', value: this.getCcTypeTitleByCode(this.creditCardType())
},
{
'name': 'Credit Card Number', value: this.formatDisplayCcNumber(this.creditCardNumber())
}
];
}
});
});<|fim▁end|> | //Set expiration month to credit card data object
this.creditCardExpMonth.subscribe(function (value) { |
<|file_name|>tictactoe.py<|end_file_name|><|fim▁begin|># Tic Tac Toe
# Tic Tac Toe
import random
def drawBoard(board):
# This function prints out the board that it was passed.
# "board" is a list of 10 strings representing the board (ignore index 0)
print(' | |')
print(' ' + board[7] + ' | ' + board[8] + ' | ' + board[9])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[4] + ' | ' + board[5] + ' | ' + board[6])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[1] + ' | ' + board[2] + ' | ' + board[3])
print(' | |')
def inputPlayerLetter():
# Let's the player type which letter they want to be.
# Returns a list with the player's letter as the first item, and the computer's letter as the second.
letter = ''
while not (letter == 'X' or letter == 'O'):
print('Do you want to be X or O?')
letter = input().upper()
# the first element in the tuple is the player's letter, the second is the computer's letter.
if letter == 'X':
return ['X', 'O']
else:
return ['O', 'X']
def whoGoesFirst():
# Randomly choose the player who goes first.
if random.randint(0, 1) == 0:
return 'computer'
else:
return 'player'
def playAgain():
# This function returns True if the player wants to play again, otherwise it returns False.
print('Do you want to play again? (yes or no)')
return input().lower().startswith('y')
def makeMove(board, letter, move):
board[move] = letter
def isWinner(bo, le):
# Given a board and a player's letter, this function returns True if that player has won.
# We use bo instead of board and le instead of letter so we don't have to type as much.
return ((bo[7] == le and bo[8] == le and bo[9] == le) or # across the top
(bo[4] == le and bo[5] == le and bo[6] == le) or # across the middle
(bo[1] == le and bo[2] == le and bo[3] == le) or # across the bottom
(bo[7] == le and bo[4] == le and bo[1] == le) or # down the left side
(bo[8] == le and bo[5] == le and bo[2] == le) or # down the middle
(bo[9] == le and bo[6] == le and bo[3] == le) or # down the right side
(bo[7] == le and bo[5] == le and bo[3] == le) or # diagonal
(bo[9] == le and bo[5] == le and bo[1] == le)) # diagonal
def getBoardCopy(board):
# Make a duplicate of the board list and return it the duplicate.
dupeBoard = []
for i in board:
dupeBoard.append(i)
return dupeBoard
def isSpaceFree(board, move):
# Return true if the passed move is free on the passed board.
return board[move] == ' '
def getPlayerMove(board):
# Let the player type in his move.
move = ' '
while move not in '1 2 3 4 5 6 7 8 9'.split() or not isSpaceFree(board, int(move)):
print('What is your next move? (1-9)')
move = input()
return int(move)
def chooseRandomMoveFromList(board, movesList):
# Returns a valid move from the passed list on the passed board.
# Returns None if there is no valid move.
possibleMoves = []
for i in movesList:
if isSpaceFree(board, i):
possibleMoves.append(i)
if len(possibleMoves) != 0:
return random.choice(possibleMoves)
else:
return None
def getComputerMove(board, computerLetter):
# Given a board and the computer's letter, determine where to move and return that move.
if computerLetter == 'X':
playerLetter = 'O'
else:
playerLetter = 'X'
# Here is our algorithm for our Tic Tac Toe AI:
# First, check if we can win in the next move
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, computerLetter, i)
if isWinner(copy, computerLetter):
return i
# Check if the player could win on his next move, and block them.
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, playerLetter, i)
if isWinner(copy, playerLetter):
return i
# Try to take one of the corners, if they are free.
move = chooseRandomMoveFromList(board, [1, 3, 7, 9])
if move != None:
return move
# Try to take the center, if it is free.
if isSpaceFree(board, 5):
return 5
# Move on one of the sides.
return chooseRandomMoveFromList(board, [2, 4, 6, 8])
def isBoardFull(board):
# Return True if every space on the board has been taken. Otherwise return False.
for i in range(1, 10):
if isSpaceFree(board, i):
return False<|fim▁hole|>print('Welcome to Tic Tac Toe!')
while True:
# Reset the board
theBoard = [' '] * 10
playerLetter, computerLetter = inputPlayerLetter()
turn = whoGoesFirst()
print('The ' + turn + ' will go first.')
gameIsPlaying = True
while gameIsPlaying:
if turn == 'player':
# Player's turn.
drawBoard(theBoard)
move = getPlayerMove(theBoard)
makeMove(theBoard, playerLetter, move)
if isWinner(theBoard, playerLetter):
drawBoard(theBoard)
print('Hooray! You have won the game!')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'computer'
else:
# Computer's turn.
move = getComputerMove(theBoard, computerLetter)
makeMove(theBoard, computerLetter, move)
if isWinner(theBoard, computerLetter):
drawBoard(theBoard)
print('The computer has beaten you! You lose.')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'player'
if not playAgain():
break<|fim▁end|> | return True
|
<|file_name|>XMLUtil.java<|end_file_name|><|fim▁begin|>/* Mesquite source code. Copyright 1997-2009 W. Maddison and D. Maddison.
Version 2.7, August 2009.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.lib;
import java.util.*;
import java.io.*;
import org.dom4j.*;
import org.dom4j.io.*;
public class XMLUtil {
/*.................................................................................................................*/
public static Element addFilledElement(Element containingElement, String name, String content) {
if (content == null || name == null)
return null;
Element element = DocumentHelper.createElement(name);
element.addText(content);
containingElement.add(element);
return element;
}
/*.................................................................................................................*/
public static Element addFilledElement(Element containingElement, String name, CDATA cdata) {
if (cdata == null || name == null)
return null;
Element element = DocumentHelper.createElement(name);
element.add(cdata);
containingElement.add(element);
return element;
}
public static String getTextFromElement(Element containingElement, String name){
Element e = containingElement.element(name);
if (e == null)
return null;
else return e.getText();
}
/*.................................................................................................................*/
public static String getDocumentAsXMLString(Document doc, boolean escapeText)
{
try {
String encoding = doc.getXMLEncoding();
if (encoding == null)
encoding = "UTF-8";
Writer osw = new StringWriter();
OutputFormat opf = new OutputFormat(" ", true, encoding);
XMLWriter writer = new XMLWriter(osw, opf);
writer.setEscapeText(escapeText);
writer.write(doc);
writer.close();
return osw.toString();
} catch (IOException e) {
MesquiteMessage.warnProgrammer("XML Document could not be returned as string.");
}
return null;
}
/*.................................................................................................................*/
public static String getElementAsXMLString(Element doc, String encoding, boolean escapeText)
{
try {
Writer osw = new StringWriter();
OutputFormat opf = new OutputFormat(" ", true, encoding);
XMLWriter writer = new XMLWriter(osw, opf);
writer.setEscapeText(escapeText);
writer.write(doc);
writer.close();
return osw.toString();
} catch (IOException e) {
MesquiteMessage.warnProgrammer("XML Document could not be returned as string.");
}
return null;
}
/*.................................................................................................................*/
public static String getDocumentAsXMLString(Document doc) {
return getDocumentAsXMLString(doc,true);
}
/*.................................................................................................................*/
public static String getDocumentAsXMLString2(Document doc)
{
try {
String encoding = doc.getXMLEncoding();
//if (encoding == null)
// encoding = "UTF-8";
Writer osw = new StringWriter();
OutputFormat opf = new OutputFormat(" ", true);
XMLWriter writer = new XMLWriter(osw, opf);
writer.write(doc);
writer.close();
return osw.toString();
} catch (IOException e) {
MesquiteMessage.warnProgrammer("XML Document could not be returned as string.");
}
return null;
}
/*.................................................................................................................*/
public static Document getDocumentFromString(String rootElementName, String contents) {
Document doc = null;<|fim▁hole|> doc = DocumentHelper.parseText(contents);
} catch (Exception e) {
return null;
}
if (doc == null || doc.getRootElement() == null) {
return null;
} else if (!StringUtil.blank(rootElementName) && !doc.getRootElement().getName().equals(rootElementName)) {
return null;
}
return doc;
}
/*.................................................................................................................*/
public static Document getDocumentFromString(String contents) {
return getDocumentFromString("",contents);
}
/*.................................................................................................................*/
public static Element getRootXMLElementFromString(String rootElementName, String contents) {
Document doc = getDocumentFromString(rootElementName, contents);
if (doc==null)
return null;
return doc.getRootElement();
}
/*.................................................................................................................*/
public static Element getRootXMLElementFromString(String contents) {
return getRootXMLElementFromString("",contents);
}
/*.................................................................................................................*/
public static Element getRootXMLElementFromURL(String rootElementName, String url) {
SAXReader saxReader = new SAXReader();
Document doc = null;
try {
doc = saxReader.read(url);
} catch (Exception e) {
return null;
}
if (doc == null || doc.getRootElement() == null) {
return null;
} else if (!StringUtil.blank(rootElementName) && !doc.getRootElement().getName().equals(rootElementName)) {
return null;
}
Element root = doc.getRootElement();
return root;
}
/*.................................................................................................................*/
public static Element getRootXMLElementFromURL(String url) {
return getRootXMLElementFromURL("",url);
}
/*.................................................................................................................*/
public static void readXMLPreferences(MesquiteModule module, XMLPreferencesProcessor xmlPrefProcessor, String contents) {
Element root = getRootXMLElementFromString("mesquite",contents);
if (root==null)
return;
Element element = root.element(module.getXMLModuleName());
if (element != null) {
Element versionElement = element.element("version");
if (versionElement == null)
return ;
else {
int version = MesquiteInteger.fromString(element.elementText("version"));
boolean acceptableVersion = (module.getXMLPrefsVersion()==version || !module.xmlPrefsVersionMustMatch());
if (acceptableVersion)
processPreferencesFromXML(xmlPrefProcessor, element);
else
return;
}
}
}
/*.................................................................................................................*/
public static void processPreferencesFromXML ( XMLPreferencesProcessor xmlPrefProcessor, Element element) {
List prefElement = element.elements();
for (Iterator iter = prefElement.iterator(); iter.hasNext();) { // this is going through all of the notices
Element messageElement = (Element) iter.next();
xmlPrefProcessor.processSingleXMLPreference(messageElement.getName(), messageElement.getText());
}
}
}<|fim▁end|> | try { |
<|file_name|>handler_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 Canonical Ltd.
// Licensed under the LGPLv3, see LICENCE file for details.
package debugstatus_test
import (
"encoding/json"
"net/http"
jc "github.com/juju/testing/checkers"
"github.com/juju/testing/httptesting"
"github.com/juju/utils/debugstatus"
"github.com/julienschmidt/httprouter"
gc "gopkg.in/check.v1"
"gopkg.in/errgo.v1"
"github.com/juju/httprequest"
)
var errorMapper httprequest.ErrorMapper = func(err error) (httpStatus int, errorBody interface{}) {
return http.StatusInternalServerError, httprequest.RemoteError{
Message: err.Error(),
}
}
type handlerSuite struct {
}
var _ = gc.Suite(&handlerSuite{})
var errUnauthorized = errgo.New("you shall not pass!")
func newHTTPHandler(h *debugstatus.Handler) http.Handler {
errMapper := httprequest.ErrorMapper(func(err error) (httpStatus int, errorBody interface{}) {
code, status := "", http.StatusInternalServerError
switch errgo.Cause(err) {
case errUnauthorized:
code, status = "unauthorized", http.StatusUnauthorized
case debugstatus.ErrNoPprofConfigured:
code, status = "forbidden", http.StatusForbidden
case debugstatus.ErrNoTraceConfigured:
code, status = "forbidden", http.StatusForbidden
}
return status, httprequest.RemoteError{
Code: code,
Message: err.Error(),
}
})
handlers := errMapper.Handlers(func(httprequest.Params) (*debugstatus.Handler, error) {
return h, nil
})
r := httprouter.New()
for _, h := range handlers {
r.Handle(h.Method, h.Path, h.Handle)
}
return r
}
func (s *handlerSuite) TestServeDebugStatus(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{
Check: func() map[string]debugstatus.CheckResult {
return debugstatus.Check(debugstatus.ServerStartTime)
},
})
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: "/debug/status",
ExpectBody: httptesting.BodyAsserter(func(c *gc.C, body json.RawMessage) {
var result map[string]debugstatus.CheckResult
err := json.Unmarshal(body, &result)
c.Assert(err, gc.IsNil)
for k, v := range result {
v.Duration = 0
result[k] = v
}
c.Assert(result, jc.DeepEquals, map[string]debugstatus.CheckResult{
"server_started": {
Name: "Server started",
Value: debugstatus.StartTime.String(),
Passed: true,
},
})
}),
})
}
func (s *handlerSuite) TestServeDebugStatusWithNilCheck(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{})
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: "/debug/status",
ExpectBody: map[string]debugstatus.CheckResult{},
})
}
func (s *handlerSuite) TestServeDebugInfo(c *gc.C) {
version := debugstatus.Version{
GitCommit: "some-git-status",
Version: "a-version",
}
httpHandler := newHTTPHandler(&debugstatus.Handler{
Version: version,
})
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: "/debug/info",
ExpectStatus: http.StatusOK,
ExpectBody: version,
})
}<|fim▁hole|> "/debug/pprof/cmdline",
"/debug/pprof/profile?seconds=1",
"/debug/pprof/symbol",
"/debug/pprof/goroutine",
}
func (s *handlerSuite) TestServeDebugPprof(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{
CheckPprofAllowed: func(req *http.Request) error {
if req.Header.Get("Authorization") == "" {
return errUnauthorized
}
return nil
},
})
authHeader := make(http.Header)
authHeader.Set("Authorization", "let me in")
for i, path := range debugPprofPaths {
c.Logf("%d. %s", i, path)
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: path,
ExpectStatus: http.StatusUnauthorized,
ExpectBody: httprequest.RemoteError{
Code: "unauthorized",
Message: "you shall not pass!",
},
})
rr := httptesting.DoRequest(c, httptesting.DoRequestParams{
Handler: httpHandler,
URL: path,
Header: authHeader,
})
c.Assert(rr.Code, gc.Equals, http.StatusOK)
}
}
func (s *handlerSuite) TestDebugPprofForbiddenWhenNotConfigured(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{})
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: "/debug/pprof/",
ExpectStatus: http.StatusForbidden,
ExpectBody: httprequest.RemoteError{
Code: "forbidden",
Message: "no pprof access configured",
},
})
}
var debugTracePaths = []string{
"/debug/events",
"/debug/requests",
}
func (s *handlerSuite) TestServeTraceEvents(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{
CheckTraceAllowed: func(req *http.Request) (bool, error) {
if req.Header.Get("Authorization") == "" {
return false, errUnauthorized
}
return false, nil
},
})
authHeader := make(http.Header)
authHeader.Set("Authorization", "let me in")
for i, path := range debugTracePaths {
c.Logf("%d. %s", i, path)
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: path,
ExpectStatus: http.StatusUnauthorized,
ExpectBody: httprequest.RemoteError{
Code: "unauthorized",
Message: "you shall not pass!",
},
})
rr := httptesting.DoRequest(c, httptesting.DoRequestParams{
Handler: httpHandler,
URL: path,
Header: authHeader,
})
c.Assert(rr.Code, gc.Equals, http.StatusOK)
}
}
func (s *handlerSuite) TestDebugEventsForbiddenWhenNotConfigured(c *gc.C) {
httpHandler := newHTTPHandler(&debugstatus.Handler{})
httptesting.AssertJSONCall(c, httptesting.JSONCallParams{
Handler: httpHandler,
URL: "/debug/events",
ExpectStatus: http.StatusForbidden,
ExpectBody: httprequest.RemoteError{
Code: "forbidden",
Message: "no trace access configured",
},
})
}<|fim▁end|> |
var debugPprofPaths = []string{
"/debug/pprof/", |
<|file_name|>test_import_issues.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# test_import_issues.py - Test issue importing.
# Copyright (C) 2008 by Drew Hess <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Test issue importing."""
import unittest
import lobbyists
import sqlite3
import util
class TestImportIssues(unittest.TestCase):
def test_import_issues(self):
"""Import issues"""
filings = list(lobbyists.parse_filings(util.testpath('issues.xml')))
con = sqlite3.connect(':memory:')
con = lobbyists.create_db(con)
cur = con.cursor()
self.failUnless(lobbyists.import_filings(cur, filings))
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute("SELECT * FROM issue")
rows = list(cur)
row = rows.pop()
self.failUnlessEqual(row['id'], 23)
self.failUnlessEqual(row['code'],
'ENERGY/NUCLEAR')
self.failUnlessEqual(row['specific_issue'],
'\r\nComprehensive Energy Bill')
row = rows.pop()
self.failUnlessEqual(row['id'], 22)
self.failUnlessEqual(row['code'],
'TRANSPORTATION')
self.failUnlessEqual(row['specific_issue'],
'\r\nH.R. 1495 Water Resources Development Act (WRDA) - the WRDA provisions to modernize the locks on the Upper Mississippi and Illinois Rivers are essential if U.S. agriculture is going to remain competitive in the global marketplace.\r\nH.R. 1495 the Water Resources Development Act of 2007 (WRDA) - conference report - Title VIII of the legislation includes authorization for the Corps of Engineers to construct new 1,200 foot locks on the Upper Mississippi and Illinois Rivers\n')
row = rows.pop()
self.failUnlessEqual(row['id'], 21)
self.failUnlessEqual(row['code'],
'IMMIGRATION')
self.failUnlessEqual(row['specific_issue'],
'\r\nImmigration - Thanking Senator Lincoln and her staff for the hard work and long hours and dedication they presented in an effort to develop a comprehensive immigration reform.\n')
row = rows.pop()
self.failUnlessEqual(row['id'], 20)
self.failUnlessEqual(row['code'],
'AGRICULTURE')
self.failUnlessEqual(row['specific_issue'],
'\r\nFY08 Agriculture Appropriations Bill - (Sec. 738) amendment to prohibit USDA from spending money for health inspection of horses.\n\nH.R. 3161, the FY08 Ag spending bill - amendments: King/Kingston amendment to strike Sec. 738. It would limit USDA authority for equine health inspection, effectively restricting the movement of all horses; Ackerman amendment prohibits funding for Food Safety and Inspection Service (FSIS) inspections in facilities that process nonambulatory or downer livestock; Whitfield-Spratt-Rahall-Chandler amendment to restrict USDA inspection of horses intended for processing for human consumption.\n\nPayment Limits.\r\nFarm Bill: tax title, reductions in direct payments, counter-cyclical revenue option, senate ag committee markup on farm bill, amendments seeking further reform to payment limits and adjusted gross income restrictions.\n')
row = rows.pop()
self.failUnlessEqual(row['id'], 19)
self.failUnlessEqual(row['code'],
'TRADE (DOMESTIC/FOREIGN)')
self.failUnlessEqual(row['specific_issue'],
'\r\nU.S. -Peru Trade Promotion Agreement (TPA) - the goal is to increase U.S. agriculture exports and increase market share.')
row = rows.pop()
self.failUnlessEqual(row['id'], 18)
self.failUnlessEqual(row['code'],
'EDUCATION')
self.failUnlessEqual(row['specific_issue'],
'\r\nFY08 Labor, HHS and Education spending. Perkins Amendment (federal funding for FFA and career and technical education).')
row = rows.pop()
self.failUnlessEqual(row['id'], 17)
self.failUnlessEqual(row['code'],
'ROADS/HIGHWAY')
self.failUnlessEqual(row['specific_issue'],
'\r\nH.R. 3098 to restore farm truck exemptions from federal motor carrier vehicle regulations.')
row = rows.pop()
self.failUnlessEqual(row['id'], 16)
self.failUnlessEqual(row['code'],
'DEFENSE')
self.failUnlessEqual(row['specific_issue'],
'H.R.3222 & Senate FY08 Defense Appropriations-Navy, Army & SOCOM R&D\nH.R.1585 & S.1547 FY08 Defense Authorizations-Navy, Army & SOCOM R&D\n')
row = rows.pop()
self.failUnlessEqual(row['id'], 15)
self.failUnlessEqual(row['code'],
'HOMELAND SECURITY')
self.failUnlessEqual(row['specific_issue'],
'H.R.3222 & Senate FY08 Defense Appropriations-Navy, Army & SOCOM R&D\nH.R.1585 & S.1547 FY08 Defense Authorizations-Navy, Army & SOCOM R&D\nH.R.2638 & S.1644 FY08 DHS AppropriationsBill-CRP')
row = rows.pop()
self.failUnlessEqual(row['id'], 14)
self.failUnlessEqual(row['code'],
'BUDGET/APPROPRIATIONS')
self.failUnlessEqual(row['specific_issue'],
'H.R.3222 & Senate FY08 Defense Appropriations-Navy, Army & SOCOM R&D\nH.R.1585 & S.1547 FY08 Defense Authorizations-Navy, Army & SOCOM R&D\nH.R.2638 & S.1644 FY08 DHS AppropriationsBill-CRP')
row = rows.pop()
self.failUnlessEqual(row['id'], 13)
self.failUnlessEqual(row['code'],
'DEFENSE')
self.failUnlessEqual(row['specific_issue'],
'DEFENSE AUTHORIZATION, DEFENSE APPROPRIATIONS, VETERANS, DEFENSE HEALTH CARE, ARMED FORCES RETIREMENT, ARMED FORCES PERSONNEL BENEFITS, EMERGING DEFENSE RELATED ISSUES')
row = rows.pop()
self.failUnlessEqual(row['id'], 12)
self.failUnlessEqual(row['code'],
'BANKING')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 11)
self.failUnlessEqual(row['code'],
'REAL ESTATE/LAND USE/CONSERVATION')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 10)
self.failUnlessEqual(row['code'],
'FINANCIAL INSTITUTIONS/INVESTMENTS/SECURITIES')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 9)
self.failUnlessEqual(row['code'],
'FOREIGN RELATIONS')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 8)
self.failUnlessEqual(row['code'],
'LAW ENFORCEMENT/CRIME/CRIMINAL JUSTICE')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 7)
self.failUnlessEqual(row['code'],
'FAMILY ISSUES/ABORTION/ADOPTION')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 6)
self.failUnlessEqual(row['code'],
'HEALTH ISSUES')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 5)
self.failUnlessEqual(row['code'],
'MEDICARE/MEDICAID')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 4)
self.failUnlessEqual(row['code'],
'WELFARE')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 3)
self.failUnlessEqual(row['code'],
'BUDGET/APPROPRIATIONS')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 2)
self.failUnlessEqual(row['code'],
'TAXATION/INTERNAL REVENUE CODE')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
row = rows.pop()
self.failUnlessEqual(row['id'], 1)
self.failUnlessEqual(row['code'],
'INSURANCE')
self.failUnlessEqual(row['specific_issue'],
'unspecified')
self.failUnlessEqual(len(rows), 0)
def test_import_issues_issue_code(self):
"""Importing issues should fill issue_code table."""
filings = list(lobbyists.parse_filings(util.testpath('issues.xml')))
con = sqlite3.connect(':memory:')
con = lobbyists.create_db(con)
cur = con.cursor()
self.failUnless(lobbyists.import_filings(cur, filings))
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute("SELECT * FROM issue_code")
rows = list(cur)
row = rows.pop()
self.failUnlessEqual(row['code'],
'ENERGY/NUCLEAR')
row = rows.pop()
self.failUnlessEqual(row['code'],
'TRANSPORTATION')
row = rows.pop()
self.failUnlessEqual(row['code'],
'IMMIGRATION')
row = rows.pop()
self.failUnlessEqual(row['code'],
'AGRICULTURE')
row = rows.pop()
self.failUnlessEqual(row['code'],
'TRADE (DOMESTIC/FOREIGN)')
row = rows.pop()
self.failUnlessEqual(row['code'],
'EDUCATION')
row = rows.pop()
self.failUnlessEqual(row['code'],
'ROADS/HIGHWAY')
row = rows.pop()
self.failUnlessEqual(row['code'],
'HOMELAND SECURITY')
row = rows.pop()
self.failUnlessEqual(row['code'],
'DEFENSE')
row = rows.pop()
self.failUnlessEqual(row['code'],
'BANKING')<|fim▁hole|> self.failUnlessEqual(row['code'],
'REAL ESTATE/LAND USE/CONSERVATION')
row = rows.pop()
self.failUnlessEqual(row['code'],
'FINANCIAL INSTITUTIONS/INVESTMENTS/SECURITIES')
row = rows.pop()
self.failUnlessEqual(row['code'],
'FOREIGN RELATIONS')
row = rows.pop()
self.failUnlessEqual(row['code'],
'LAW ENFORCEMENT/CRIME/CRIMINAL JUSTICE')
row = rows.pop()
self.failUnlessEqual(row['code'],
'FAMILY ISSUES/ABORTION/ADOPTION')
row = rows.pop()
self.failUnlessEqual(row['code'],
'HEALTH ISSUES')
row = rows.pop()
self.failUnlessEqual(row['code'],
'MEDICARE/MEDICAID')
row = rows.pop()
self.failUnlessEqual(row['code'],
'WELFARE')
row = rows.pop()
self.failUnlessEqual(row['code'],
'BUDGET/APPROPRIATIONS')
row = rows.pop()
self.failUnlessEqual(row['code'],
'TAXATION/INTERNAL REVENUE CODE')
row = rows.pop()
self.failUnlessEqual(row['code'],
'INSURANCE')
self.failUnlessEqual(len(rows), 0)
def test_import_filings_to_issues(self):
"""Issues are matched up with filings in the database."""
filings = list(lobbyists.parse_filings(util.testpath('issues.xml')))
con = sqlite3.connect(':memory:')
con = lobbyists.create_db(con)
cur = con.cursor()
self.failUnless(lobbyists.import_filings(cur, filings))
con.row_factory = sqlite3.Row
cur = con.cursor()
cur.execute("SELECT * FROM filing_issues")
rows = list(cur)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 23)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 22)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 21)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 20)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 19)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 18)
row = rows.pop()
self.failUnlessEqual(row['filing'], '79E53F91-8C5F-44AD-909D-032AA25D5B00')
self.failUnlessEqual(row['issue'], 17)
row = rows.pop()
self.failUnlessEqual(row['filing'], '05804BE5-57C9-41BF-97B2-0120826D4393')
self.failUnlessEqual(row['issue'], 16)
row = rows.pop()
self.failUnlessEqual(row['filing'], '05804BE5-57C9-41BF-97B2-0120826D4393')
self.failUnlessEqual(row['issue'], 15)
row = rows.pop()
self.failUnlessEqual(row['filing'], '05804BE5-57C9-41BF-97B2-0120826D4393')
self.failUnlessEqual(row['issue'], 14)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'F56492FC-4FBD-4824-83E1-0004B30F0519')
self.failUnlessEqual(row['issue'], 13)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 12)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 11)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 10)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 9)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 8)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'A55002C7-78C4-41BA-A6CA-01FCF7650116')
self.failUnlessEqual(row['issue'], 7)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 6)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 5)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 4)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 3)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 2)
row = rows.pop()
self.failUnlessEqual(row['filing'], 'D1C9DB2A-AE4F-4FED-9BCB-024C8373813E')
self.failUnlessEqual(row['issue'], 1)
self.failUnlessEqual(len(rows), 0)
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
row = rows.pop() |
<|file_name|>bitcoin_el_GR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="el_GR" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Colossus</source>
<translation>Σχετικά με το Colossus</translation>
</message>
<message>
<location line="+39"/>
<source><b>Colossus</b> version</source>
<translation>Έκδοση Colossus</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Πνευματική ιδιοκτησία </translation>
</message>
<message>
<location line="+0"/>
<source>The Colossus developers</source>
<translation>Οι Colossus προγραμματιστές </translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Βιβλίο Διευθύνσεων</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Διπλό-κλικ για επεξεργασία της διεύθυνσης ή της ετικέτας</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Δημιούργησε νέα διεύθυνση</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Αντέγραψε την επιλεγμένη διεύθυνση στο πρόχειρο του συστήματος</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Νέα διεύθυνση</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Colossus addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Αυτές είναι οι Colossus διευθύνσεις σας για να λαμβάνετε πληρωμές. Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Αντιγραφή διεύθυνσης</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Δείξε &QR κωδικα</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Colossus address</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Colossus</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>&Υπέγραψε το μήνυμα</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Αντιγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Εξαγωγή</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Colossus address</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Colossus</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Διαγραφή</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Colossus addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Αυτές είναι οι Colossus διευθύνσεις σας για να λαμβάνετε πληρωμές. Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Αντιγραφή &επιγραφής</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Επεξεργασία</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Αποστολή νομισμάτων</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Εξαγωγή Δεδομενων Βιβλίου Διευθύνσεων</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Εξαγωγή λαθών</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Αδυναμία εγγραφής στο αρχείο %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Ετικέτα</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(χωρίς ετικέτα)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Φράση πρόσβασης </translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Βάλτε κωδικό πρόσβασης</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Νέος κωδικός πρόσβασης</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Επανέλαβε τον νέο κωδικό πρόσβασης</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Εισάγετε τον νέο κωδικό πρόσβασης στον πορτοφόλι <br/> Παρακαλώ χρησιμοποιείστε ένα κωδικό με <b> 10 ή περισσότερους τυχαίους χαρακτήρες</b> ή <b> οχτώ ή παραπάνω λέξεις</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Κρυπτογράφησε το πορτοφόλι</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Αυτη η ενεργεία χρειάζεται τον κωδικό του πορτοφολιού για να ξεκλειδώσει το πορτοφόλι.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Ξεκλειδωσε το πορτοφολι</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Αυτη η ενεργεια χρειάζεται τον κωδικο του πορτοφολιου για να αποκρυπτογραφησειι το πορτοφολι.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Αποκρυπτογράφησε το πορτοφολι</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Άλλαξε κωδικο πρόσβασης</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Εισάγετε τον παλιό και τον νεο κωδικο στο πορτοφολι.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Επιβεβαίωσε την κρυπτογραφηση του πορτοφολιού</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>Προσοχη: Εαν κρυπτογραφησεις το πορτοφολι σου και χάσεις τον κωδικο σου θα χάσεις <b> ΟΛΑ ΣΟΥ ΤΑ LITECOINS</b>!
Είσαι σίγουρος ότι θέλεις να κρυπτογραφησεις το πορτοφολι;</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Είστε σίγουροι ότι θέλετε να κρυπτογραφήσετε το πορτοφόλι σας;</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>ΣΗΜΑΝΤΙΚΟ: Τα προηγούμενα αντίγραφα ασφαλείας που έχετε κάνει από το αρχείο του πορτοφόλιου σας θα πρέπει να αντικατασταθουν με το νέο που δημιουργείται, κρυπτογραφημένο αρχείο πορτοφόλιου. Για λόγους ασφαλείας, τα προηγούμενα αντίγραφα ασφαλείας του μη κρυπτογραφημένου αρχείου πορτοφόλιου θα καταστουν άχρηστα μόλις αρχίσετε να χρησιμοποιείτε το νέο κρυπτογραφημένο πορτοφόλι. </translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Προσοχη: το πλήκτρο Caps Lock είναι ενεργο.</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Κρυπτογραφημενο πορτοφολι</translation>
</message>
<message>
<location line="-56"/>
<source>Colossus will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your colossuss from being stolen by malware infecting your computer.</source>
<translation>Το Colossus θα κλεισει τώρα για να τελειώσει την διαδικασία κρυπτογραφησης. Θυμησου ότι κρυπτογραφώντας το πορτοφολι σου δεν μπορείς να προστατέψεις πλήρως τα colossuss σου από κλοπή στην περίπτωση όπου μολυνθεί ο υπολογιστής σου με κακόβουλο λογισμικο.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Η κρυπτογραφηση του πορτοφολιού απέτυχε</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Η κρυπτογράφηση του πορτοφολιού απέτυχε λογω εσωτερικού σφάλματος. Το πορτοφολι δεν κρυπτογραφηθηκε.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Οι εισαχθέντες κωδικοί δεν ταιριάζουν.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Ο κωδικος που εισήχθη για την αποκρυπτογραφηση του πορτοφολιού ήταν λαθος.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Η αποκρυπτογραφηση του πορτοφολιού απέτυχε</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Ο κωδικος του πορτοφολιού άλλαξε με επιτυχία.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Υπογραφή &Μηνύματος...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Συγχρονισμός με το δίκτυο...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Επισκόπηση</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Εμφάνισε γενική εικονα του πορτοφολιού</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Συναλλαγές</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Περιήγηση στο ιστορικο συνναλαγων</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Εξεργασια της λιστας των αποθηκευμενων διευθύνσεων και ετικετων</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Εμφάνισε την λίστα των διευθύνσεων για την παραλαβή πληρωμων</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>Έ&ξοδος</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Εξοδος από την εφαρμογή</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Colossus</source>
<translation>Εμφάνισε πληροφορίες σχετικά με το Colossus</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Σχετικά με &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Εμφάνισε πληροφορίες σχετικά με Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Επιλογές...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Κρυπτογράφησε το πορτοφόλι</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Αντίγραφο ασφαλείας του πορτοφολιού</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Άλλαξε κωδικο πρόσβασης</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Εισαγωγή μπλοκ από τον σκληρο δίσκο ... </translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Φόρτωση ευρετηρίου μπλοκ στον σκληρο δισκο...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Colossus address</source>
<translation>Στείλε νομισματα σε μια διεύθυνση colossus</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Colossus</source>
<translation>Επεργασία ρυθμισεων επιλογών για το Colossus</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Δημιουργία αντιγράφου ασφαλείας πορτοφολιού σε άλλη τοποθεσία</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Αλλαγή του κωδικού κρυπτογράφησης του πορτοφολιού</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>&Παράθυρο αποσφαλμάτωσης</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Άνοιγμα κονσόλας αποσφαλμάτωσης και διαγνωστικών</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Colossus</source>
<translation>Colossus</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Πορτοφόλι</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Αποστολή</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Παραλαβή </translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Διεύθυνσεις</translation>
</message>
<message>
<location line="+22"/>
<source>&About Colossus</source>
<translation>&Σχετικα:Colossus</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Εμφάνισε/Κρύψε</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Εμφάνιση ή αποκρύψη του κεντρικου παράθυρου </translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Κρυπτογραφήστε τα ιδιωτικά κλειδιά που ανήκουν στο πορτοφόλι σας </translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Colossus addresses to prove you own them</source>
<translation>Υπογράψτε ένα μήνυμα για να βεβαιώσετε πως είστε ο κάτοχος αυτής της διεύθυνσης</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Colossus addresses</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Colossus</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Αρχείο</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Ρυθμίσεις</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Βοήθεια</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Εργαλειοθήκη καρτελών</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Colossus client</source>
<translation>Πελάτης Colossus</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Colossus network</source>
<translation><numerusform>%n ενεργή σύνδεση στο δίκτυο Colossus</numerusform><numerusform>%n ενεργές συνδέσεις στο δίκτυο Βitcoin</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation>Η πηγή του μπλοκ δεν ειναι διαθέσιμη... </translation>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Μεταποιημένα %1 απο % 2 (κατ 'εκτίμηση) μπλοκ της ιστορίας της συναλλαγής. </translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Έγινε λήψη %1 μπλοκ ιστορικού συναλλαγών</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n ώρες </numerusform><numerusform>%n ώρες </numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n ημέρες </numerusform><numerusform>%n ημέρες </numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n εβδομάδα</numerusform><numerusform>%n εβδομάδες</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 πίσω</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Το τελευταίο μπλοκ που ελήφθη δημιουργήθηκε %1 πριν.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Οι συναλλαγές μετά από αυτό δεν θα είναι ακόμη ορατες.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Σφάλμα</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Προειδοποίηση</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Πληροφορία</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Η συναλλαγή ξεπερνάει το όριο.
Μπορεί να ολοκληρωθεί με μια αμοιβή των %1, η οποία αποδίδεται στους κόμβους που επεξεργάζονται τις συναλλαγές και βοηθούν στην υποστήριξη του δικτύου.
Θέλετε να συνεχίσετε;</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Ενημερωμένο</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Ενημέρωση...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Επιβεβαίωση αμοιβής συναλλαγής</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Η συναλλαγή απεστάλη</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Εισερχόμενη συναλλαγή</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Ημερομηνία: %1
Ποσό: %2
Τύπος: %3
Διεύθυνση: %4
</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>Χειρισμός URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Colossus address or malformed URI parameters.</source>
<translation>Το URI δεν μπορεί να αναλυθεί! Αυτό μπορεί να προκληθεί από μια μη έγκυρη διεύθυνση Colossus ή ακατάλληλη παραμέτρο URI.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Το πορτοφόλι είναι <b>κρυπτογραφημένο</b> και <b>ξεκλείδωτο</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Το πορτοφόλι είναι <b>κρυπτογραφημένο</b> και <b>κλειδωμένο</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Colossus can no longer continue safely and will quit.</source>
<translation>Παρουσιάστηκε ανεπανόρθωτο σφάλμα. Το Colossus δεν μπορεί πλέον να συνεχίσει με ασφάλεια και θα τερματισθει.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Ειδοποίηση Δικτύου</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Επεξεργασία Διεύθυνσης</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Επιγραφή</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Η επιγραφή που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Διεύθυνση</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Η διεύθυνση που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων. Μπορεί να τροποποιηθεί μόνο για τις διευθύνσεις αποστολής.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Νέα διεύθυνση λήψης</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Νέα διεύθυνση αποστολής</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Επεξεργασία διεύθυνσης λήψης</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Επεξεργασία διεύθυνσης αποστολής</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Η διεύθυνση "%1" βρίσκεται ήδη στο βιβλίο διευθύνσεων.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Colossus address.</source>
<translation>Η διεύθυνση "%1" δεν είναι έγκυρη Colossus διεύθυνση.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Δεν είναι δυνατό το ξεκλείδωμα του πορτοφολιού.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Η δημιουργία νέου κλειδιού απέτυχε.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Colossus-Qt</source>
<translation>Colossus-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>έκδοση</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Χρήση:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>επιλογής γραμμής εντολών</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>επιλογές UI</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Όρισε γλώσσα, για παράδειγμα "de_DE"(προεπιλογή:τοπικές ρυθμίσεις)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Έναρξη ελαχιστοποιημένο</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Εμφάνισε την οθόνη εκκίνησης κατά την εκκίνηση(προεπιλογή:1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Ρυθμίσεις</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Κύριο</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation>Η προαιρετική αμοιβή για κάθε kB επισπεύδει την επεξεργασία των συναλλαγών σας. Οι περισσότερες συναλλαγές είναι 1 kB. </translation>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Αμοιβή &συναλλαγής</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Colossus after logging in to the system.</source>
<translation>Αυτόματη εκκίνηση του Colossus μετά την εισαγωγή στο σύστημα</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Colossus on system login</source>
<translation>&Έναρξη του Colossus κατά την εκκίνηση του συστήματος</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Επαναφορα όλων των επιλογων του πελάτη σε default.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>Επαναφορα ρυθμίσεων</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>&Δίκτυο</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Colossus client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Αυτόματο άνοιγμα των θυρών Colossus στον δρομολογητή. Λειτουργεί μόνο αν ο δρομολογητής σας υποστηρίζει τη λειτουργία UPnP.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Απόδοση θυρών με χρήστη &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Colossus network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Σύνδεση στο Colossus δίκτυο μέσω διαμεσολαβητή SOCKS4 (π.χ. για σύνδεση μέσω Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Σύνδεση μέσω διαμεσολαβητή SOCKS</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP διαμεσολαβητή:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Διεύθυνση IP του διαμεσολαβητή (π.χ. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Θύρα:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Θύρα διαμεσολαβητή (π.χ. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Έκδοση:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>SOCKS εκδοση του διαμεσολαβητη (e.g. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Παράθυρο</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Εμφάνιση μόνο εικονιδίου στην περιοχή ειδοποιήσεων κατά την ελαχιστοποίηση</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Ελαχιστοποίηση στην περιοχή ειδοποιήσεων αντί της γραμμής εργασιών</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Ελαχιστοποίηση αντί για έξοδο κατά το κλείσιμο του παραθύρου</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>Ε&λαχιστοποίηση κατά το κλείσιμο</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Απεικόνιση</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>Γλώσσα περιβάλλοντος εργασίας: </translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Colossus.</source>
<translation>Εδώ μπορεί να ρυθμιστεί η γλώσσα διεπαφής χρήστη. Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Colossus.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Μονάδα μέτρησης:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Διαλέξτε την προεπιλεγμένη υποδιαίρεση που θα εμφανίζεται όταν στέλνετε νομίσματα.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Colossus addresses in the transaction list or not.</source>
<translation>Επιλέξτε αν θέλετε να εμφανίζονται οι διευθύνσεις Colossus στη λίστα συναλλαγών.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Εμφάνιση διευθύνσεων στη λίστα συναλλαγών</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&ΟΚ</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Ακύρωση</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Εφαρμογή</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>προεπιλογή</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Επιβεβαιώση των επιλογων επαναφοράς </translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Ορισμένες ρυθμίσεις ενδέχεται να απαιτούν επανεκκίνηση του πελάτη για να τεθούν σε ισχύ.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Θέλετε να προχωρήσετε;</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Προειδοποίηση</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Colossus.</source>
<translation>Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Colossus.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Φόρμα</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Colossus network after a connection is established, but this process has not completed yet.</source>
<translation>Οι πληροφορίες που εμφανίζονται μπορεί να είναι ξεπερασμένες. Το πορτοφόλι σας συγχρονίζεται αυτόματα με το δίκτυο Colossus μετά από μια σύνδεση, αλλά αυτή η διαδικασία δεν έχει ακόμη ολοκληρωθεί. </translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Υπόλοιπο</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Ανεπιβεβαίωτες</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Πορτοφόλι</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Ανώριμος</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Εξορυγμενο υπόλοιπο που δεν έχει ακόμα ωριμάσει </translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Πρόσφατες συναλλαγές</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Το τρέχον υπόλοιπο</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Το άθροισμα των συναλλαγών που δεν έχουν ακόμα επιβεβαιωθεί και δεν προσμετρώνται στο τρέχον υπόλοιπό σας</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>εκτός συγχρονισμού</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start colossus: click-to-pay handler</source>
<translation>Δεν είναι δυνατή η εκκίνηση του Colossus: click-to-pay handler</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Κώδικας QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Αίτηση πληρωμής</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Ποσό:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Επιγραφή:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Μήνυμα:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Αποθήκευση ως...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Σφάλμα κατά την κωδικοποίηση του URI σε κώδικα QR</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Το αναγραφόμενο ποσό δεν είναι έγκυρο, παρακαλούμε να το ελέγξετε.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Το αποτέλεσμα της διεύθυνσης είναι πολύ μεγάλο. Μειώστε το μέγεθος για το κείμενο της ετικέτας/ μηνύματος.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Αποθήκευση κώδικα QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Εικόνες PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Όνομα Πελάτη</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>Μη διαθέσιμο</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Έκδοση Πελάτη</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Πληροφορία</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Χρησιμοποιηση της OpenSSL εκδοσης</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Χρόνος εκκίνησης</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Δίκτυο</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Αριθμός συνδέσεων</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Στο testnet</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Αλυσίδα μπλοκ</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Τρέχον αριθμός μπλοκ</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Κατ' εκτίμηση συνολικά μπλοκς</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Χρόνος τελευταίου μπλοκ</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Άνοιγμα</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>επιλογής γραμμής εντολών</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Colossus-Qt help message to get a list with possible Colossus command-line options.</source>
<translation>Εμφανιση του Colossus-Qt μήνυματος βοήθειας για να πάρετε μια λίστα με τις πιθανές επιλογές Colossus γραμμής εντολών.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Εμφάνιση</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Κονσόλα</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Ημερομηνία κατασκευής</translation>
</message>
<message>
<location line="-104"/>
<source>Colossus - Debug window</source>
<translation>Colossus - Παράθυρο αποσφαλμάτωσης</translation>
</message>
<message>
<location line="+25"/>
<source>Colossus Core</source>
<translation>Colossus Core</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Αρχείο καταγραφής εντοπισμού σφαλμάτων </translation>
</message>
<message>
<location line="+7"/>
<source>Open the Colossus debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Ανοίξτε το αρχείο καταγραφής εντοπισμού σφαλμάτων από τον τρέχοντα κατάλογο δεδομένων. Αυτό μπορεί να πάρει μερικά δευτερόλεπτα για τα μεγάλα αρχεία καταγραφής. </translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Καθαρισμός κονσόλας</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Colossus RPC console.</source>
<translation>Καλώς ήρθατε στην Colossus RPC κονσόλα.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Χρησιμοποιήστε το πάνω και κάτω βέλος για να περιηγηθείτε στο ιστορικο, και <b>Ctrl-L</b> για εκκαθαριση οθονης.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Γράψτε <b>βοήθεια</b> για μια επισκόπηση των διαθέσιμων εντολών</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Αποστολή νομισμάτων</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Αποστολή σε πολλούς αποδέκτες ταυτόχρονα</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Προσθήκη αποδέκτη</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Διαγραφή όλων των πεδίων συναλλαγής</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Καθαρισμός &Όλων</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Υπόλοιπο:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123,456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Επιβεβαίωση αποστολής</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Αποστολη</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> σε %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Επιβεβαίωση αποστολής νομισμάτων</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Είστε βέβαιοι για την αποστολή %1;</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation>και</translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Η διεύθυνση του αποδέκτη δεν είναι σωστή. Παρακαλώ ελέγξτε ξανά.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Το ποσό πληρωμής πρέπει να είναι μεγαλύτερο από 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Το ποσό ξεπερνάει το διαθέσιμο υπόλοιπο</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Το σύνολο υπερβαίνει το υπόλοιπό σας όταν συμπεριληφθεί και η αμοιβή %1</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Βρέθηκε η ίδια διεύθυνση δύο φορές. Επιτρέπεται μία μόνο εγγραφή για κάθε διεύθυνση, σε κάθε διαδικασία αποστολής.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Σφάλμα: Η δημιουργία της συναλλαγής απέτυχε</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Σφάλμα: Η συναλλαγή απερρίφθη. Αυτό ενδέχεται να συμβαίνει αν κάποια από τα νομίσματα έχουν ήδη ξοδευθεί, όπως αν χρησιμοποιήσατε αντίγραφο του wallet.dat και τα νομίσματα ξοδεύθηκαν εκεί.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Φόρμα</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>&Ποσό:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Πληρωμή &σε:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Διεύθυνση αποστολής της πληρωμής (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Εισάγετε μια επιγραφή για αυτή τη διεύθυνση ώστε να καταχωρηθεί στο βιβλίο διευθύνσεων</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Επιγραφή</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Αφαίρεση αποδέκτη</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Colossus address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Εισάγετε μια διεύθυνση Colossus (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Υπογραφές - Είσοδος / Επαλήθευση μήνυματος </translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Υπογραφή Μηνύματος</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Μπορείτε να υπογράφετε μηνύματα με τις διευθύνσεις σας, ώστε ν' αποδεικνύετε πως αυτές σας ανήκουν. Αποφεύγετε να υπογράφετε κάτι αόριστο καθώς ενδέχεται να εξαπατηθείτε. Υπογράφετε μόνο πλήρης δηλώσεις με τις οποίες συμφωνείτε.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Η διεύθυνση με την οποία θα υπογραφεί το μήνυμα (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Εισάγετε εδώ το μήνυμα που θέλετε να υπογράψετε</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Υπογραφή</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Αντέγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Colossus address</source>
<translation>Υπογράψτε ένα μήνυμα για ν' αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Colossus</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Υπογραφη μήνυματος</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Επαναφορά όλων των πεδίων μήνυματος</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Καθαρισμός &Όλων</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Πληκτρολογήστε την υπογραφή διεύθυνσης, μήνυμα (βεβαιωθείτε ότι έχετε αντιγράψει τις αλλαγές γραμμής, κενά, tabs, κ.λπ. ακριβώς) και την υπογραφή παρακάτω, για να ελέγξει το μήνυμα. Να είστε προσεκτικοί για να μην διαβάσετε περισσότερα στην υπογραφή ό, τι είναι στην υπογραφή ίδιο το μήνυμα , για να μην εξαπατηθούν από έναν άνθρωπο -in - the-middle επίθεση.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Η διεύθυνση με την οποία υπογράφηκε το μήνυμα (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Colossus address</source>
    <translation>Επαληθεύστε το μήνυμα για να βεβαιωθείτε ότι υπογράφηκε με τη συγκεκριμένη διεύθυνση Colossus</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>Επιβεβαίωση μηνύματος</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Επαναφορά όλων επαλήθευμενων πεδίων μήνυματος </translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Colossus address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Εισάγετε μια διεύθυνση Colossus (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Κάντε κλικ στο "Υπογραφή Μηνύματος" για να λάβετε την υπογραφή</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Colossus signature</source>
<translation>Εισαγωγή υπογραφής Colossus</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Η διεύθυνση που εισήχθη είναι λάθος.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Παρακαλούμε ελέγξτε την διεύθυνση και δοκιμάστε ξανά.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Η διεύθυνση που έχει εισαχθεί δεν αναφέρεται σε ένα πλήκτρο.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
    <translation>Το ξεκλείδωμα του πορτοφολιού ακυρώθηκε.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Το προσωπικό κλειδί εισαγμενης διευθυνσης δεν είναι διαθέσιμο.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Η υπογραφή του μηνύματος απέτυχε.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Μήνυμα υπεγράφη.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Η υπογραφή δεν μπόρεσε να αποκρυπτογραφηθεί.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Παρακαλούμε ελέγξτε την υπογραφή και δοκιμάστε ξανά.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Η υπογραφή δεν ταιριάζει με το μήνυμα. </translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Η επιβεβαίωση του μηνύματος απέτυχε</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Μήνυμα επιβεβαιώθηκε.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Colossus developers</source>
<translation>Οι Colossus προγραμματιστές </translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Ανοιχτό μέχρι %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
    <translation>%1/χωρίς σύνδεση</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/χωρίς επιβεβαίωση</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 επιβεβαιώσεις</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Κατάσταση</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Πηγή</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Δημιουργία </translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Από</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Προς</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation> δική σας διεύθυνση </translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
    <translation>επιγραφή</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Πίστωση </translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>μη αποδεκτό</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
    <translation>Χρέωση</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Τέλος συναλλαγής </translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Καθαρό ποσό</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Μήνυμα</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Σχόλιο:</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID Συναλλαγής:</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Πρέπει να περιμένετε 120 μπλοκ πριν μπορέσετε να χρησιμοποιήσετε τα νομίσματα που έχετε δημιουργήσει. Το μπλοκ που δημιουργήσατε μεταδόθηκε στο δίκτυο για να συμπεριληφθεί στην αλυσίδα των μπλοκ. Αν δεν μπει σε αυτή θα μετατραπεί σε "μη αποδεκτό" και δε θα μπορεί να καταναλωθεί. Αυτό συμβαίνει σπάνια όταν κάποιος άλλος κόμβος δημιουργήσει ένα μπλοκ λίγα δευτερόλεπτα πριν από εσάς.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Πληροφορίες αποσφαλμάτωσης</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Συναλλαγή</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>εισροές </translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>αληθής</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>αναληθής </translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, δεν έχει ακόμα μεταδοθεί μ' επιτυχία</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>άγνωστο</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Λεπτομέρειες συναλλαγής</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Αυτό το παράθυρο δείχνει μια λεπτομερή περιγραφή της συναλλαγής</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Τύπος</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Ανοιχτό μέχρι %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Χωρίς σύνδεση (%1 επικυρώσεις)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Χωρίς επιβεβαίωση (%1 από %2 επικυρώσεις)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Επικυρωμένη (%1 επικυρώσεις)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Αυτό το μπλοκ δεν έχει παραληφθεί από κανέναν άλλο κόμβο και κατά πάσα πιθανότητα θα απορριφθεί!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Δημιουργήθηκε αλλά απορρίφθηκε</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Παραλαβή με</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Ελήφθη από</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Αποστολή προς</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Πληρωμή προς εσάς</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Εξόρυξη</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(δ/α)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Κατάσταση συναλλαγής. Πηγαίνετε το ποντίκι πάνω από αυτό το πεδίο για να δείτε τον αριθμό των επικυρώσεων</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Ημερομηνία κι ώρα λήψης της συναλλαγής.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Είδος συναλλαγής.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Διεύθυνση αποστολής της συναλλαγής.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Ποσό που αφαιρέθηκε ή προστέθηκε στο υπόλοιπο.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Όλα</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Σήμερα</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Αυτή την εβδομάδα</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Αυτόν τον μήνα</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Τον προηγούμενο μήνα</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Αυτό το έτος</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Έκταση...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Ελήφθη με</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Απεστάλη προς</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Προς εσάς</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Εξόρυξη</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Άλλο</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Αναζήτηση με βάση τη διεύθυνση ή την επιγραφή</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Ελάχιστο ποσό</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Αντιγραφή διεύθυνσης</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Αντιγραφή επιγραφής</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Αντιγραφή ποσού</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Αντιγραφη του ID Συναλλαγής</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Επεξεργασία επιγραφής</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Εμφάνιση λεπτομερειών συναλλαγής</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Εξαγωγή Στοιχείων Συναλλαγών</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Επικυρωμένες</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Ημερομηνία</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Τύπος</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Επιγραφή</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Διεύθυνση</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Ποσό</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Σφάλμα εξαγωγής</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Αδυναμία εγγραφής στο αρχείο %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Έκταση:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>έως</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Αποστολή νομισμάτων</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>&Εξαγωγή</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Αντίγραφο ασφαλείας του πορτοφολιού</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Αρχεία δεδομένων πορτοφολιού (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Αποτυχία κατά τη δημιουργία αντιγράφου</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση των δεδομένων πορτοφολιού στη νέα τοποθεσία.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Η δημιουργια αντιγραφου ασφαλειας πετυχε</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Τα δεδομένα πορτοφόλιου αποθηκεύτηκαν με επιτυχία στη νέα θέση. </translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Colossus version</source>
<translation>Έκδοση Colossus</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Χρήση:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or colossusd</source>
<translation>Αποστολή εντολής στον εξυπηρετητή ή στο colossusd</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Λίστα εντολών</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Επεξήγηση εντολής</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Επιλογές:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: colossus.conf)</source>
<translation>Ορίστε αρχείο ρυθμίσεων (προεπιλογή: colossus.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: colossusd.pid)</source>
<translation>Ορίστε αρχείο pid (προεπιλογή: colossusd.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Ορισμός φακέλου δεδομένων</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Όρισε το μέγεθος της βάσης προσωρινής αποθήκευσης σε megabytes(προεπιλογή:25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8500 or testnet: 18000)</source>
<translation>Εισερχόμενες συνδέσεις στη θύρα <port> (προεπιλογή: 8500 ή στο testnet: 18000)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Μέγιστες αριθμός συνδέσεων με τους peers <n> (προεπιλογή: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
    <translation>Σύνδεση σε έναν κόμβο για την ανάκτηση διευθύνσεων από ομοτίμους, και αποσύνδεση</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Διευκρινίστε τη δικιά σας δημόσια διεύθυνση.</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Όριο αποσύνδεσης προβληματικών peers (προεπιλογή: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Δευτερόλεπτα πριν επιτραπεί ξανά η σύνδεση των προβληματικών peers (προεπιλογή: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η πόρτα RPC %u για αναμονή IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 20120 or testnet: 17300)</source>
<translation>Εισερχόμενες συνδέσεις JSON-RPC στη θύρα <port> (προεπιλογή: 20120 or testnet: 17300)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Αποδοχή εντολών κονσόλας και JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Εκτέλεση στο παρασκήνιο κι αποδοχή εντολών</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Χρήση του δοκιμαστικού δικτύου</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Να δέχεσαι συνδέσεις από έξω(προεπιλογή:1)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=colossusrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Colossus Alert" [email protected]
</source>
<translation>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=colossusrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Colossus Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η υποδοχη RPC %u για αναμονη του IPv6, επεσε πισω στο IPv4:%s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Αποθηκευση σε συγκεκριμένη διεύθυνση. Χρησιμοποιήστε τα πλήκτρα [Host] : συμβολισμός θύρα για IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Colossus is probably already running.</source>
<translation>Αδυναμία κλειδώματος του φακέλου δεδομένων %s. Πιθανώς το Colossus να είναι ήδη ενεργό.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Σφάλμα: Η συναλλαγή απορρίφθηκε.
Αυτό ίσως οφείλεται στο ότι τα νομίσματά σας έχουν ήδη ξοδευτεί, π.χ. με την αντιγραφή του wallet.dat σε άλλο σύστημα και την χρήση τους εκεί, χωρίς η συναλλαγή να έχει καταγραφεί στο παρόν σύστημα.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Σφάλμα: Αυτή η συναλλαγή απαιτεί αμοιβή συναλλαγής τουλάχιστον %s λόγω του μεγέθους, πολυπλοκότητας ή της χρήσης πρόσφατης παραλαβής κεφαλαίου</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
    <translation>Εκτέλεση εντολής όταν ληφθεί σχετική ειδοποίηση (το %s στην εντολή αντικαθίσταται από το μήνυμα)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
    <translation>Εκτέλεση εντολής όταν αλλάξει μια συναλλαγή του πορτοφολιού (το %s στην εντολή αντικαθίσταται από το TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Ορίστε το μέγιστο μέγεθος των high-priority/low-fee συναλλαγων σε bytes (προεπιλογή: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Αυτό είναι ένα προ-τεστ κυκλοφορίας - χρησιμοποιήστε το με δική σας ευθύνη - δεν χρησιμοποιείτε για εξόρυξη ή για αλλες εφαρμογές</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Προειδοποίηση: Η παράμετρος -paytxfee είναι πολύ υψηλή. Πρόκειται για την αμοιβή που θα πληρώνετε για κάθε συναλλαγή που θα στέλνετε.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Προειδοποίηση: Εμφανίσεις συναλλαγων δεν μπορεί να είναι σωστες! Μπορεί να χρειαστεί να αναβαθμίσετε, ή άλλοι κόμβοι μπορεί να χρειαστεί να αναβαθμίστουν. </translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Colossus will not work properly.</source>
<translation>Προειδοποίηση: Παρακαλώ βεβαιωθείτε πως η ημερομηνία κι ώρα του συστήματός σας είναι σωστές. Αν το ρολόι του υπολογιστή σας πάει λάθος, ενδέχεται να μη λειτουργεί σωστά το Colossus.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Προειδοποίηση : Σφάλμα wallet.dat κατα την ανάγνωση ! Όλα τα κλειδιά αναγνωρισθηκαν σωστά, αλλά τα δεδομένα των συναλλαγών ή καταχωρήσεις στο βιβλίο διευθύνσεων μπορεί να είναι ελλιπείς ή λανθασμένα. </translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
    <translation>Προειδοποίηση: το αρχείο wallet.dat είναι κατεστραμμένο, τα δεδομένα διασώθηκαν! Το αρχικό wallet.dat αποθηκεύτηκε ως wallet.{timestamp}.bak στο %s· αν το υπόλοιπο ή οι συναλλαγές σας είναι λανθασμένες, θα πρέπει να κάνετε επαναφορά από αντίγραφο ασφαλείας.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Προσπάθεια για ανακτησει ιδιωτικων κλειδιων από ενα διεφθαρμένο αρχειο wallet.dat </translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Αποκλεισμός επιλογων δημιουργίας: </translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Σύνδεση μόνο με ορισμένους κόμβους</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Εντοπισθηκε διεφθαρμενη βαση δεδομενων των μπλοκ</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Ανακαλύψτε την δικη σας IP διεύθυνση (προεπιλογή: 1 όταν ακούει και δεν - externalip) </translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Θελετε να δημιουργηθει τωρα η βαση δεδομενων του μπλοκ? </translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων μπλοκ</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων πορτοφόλιου %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Προειδοποίηση: Χαμηλός χώρος στο δίσκο </translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Σφάλμα: το πορτοφόλι είναι κλειδωμένο, δεν μπορεί να δημιουργηθεί συναλλαγή</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Λάθος: λάθος συστήματος:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
    <translation>Αποτυχία ακρόασης σε οποιαδήποτε θύρα. Χρησιμοποιήστε -listen=0 αν το επιθυμείτε.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Αποτυχία αναγνωσης των block πληροφοριων</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Η αναγνωση του μπλοκ απετυχε</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Ο συγχρονισμος του μπλοκ ευρετηριου απετυχε</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Η δημιουργια του μπλοκ ευρετηριου απετυχε</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Η δημιουργια των μπλοκ πληροφοριων απετυχε</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Η δημιουργια του μπλοκ απετυχε</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Αδυναμία εγγραφής πληροφοριων αρχειου</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Αποτυχία εγγραφής στη βάση δεδομένων νομίσματος</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Αποτυχία εγγραφής δείκτη συναλλαγών </translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Αποτυχία εγγραφής αναίρεσης δεδομένων </translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Βρες ομότιμους υπολογιστές χρησιμοποιώντας αναζήτηση DNS(προεπιλογή:1)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation>Δημιουργία νομισμάτων (προκαθορισμος: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Πόσα μπλοκ να ελέγχθουν κατά την εκκίνηση (προεπιλογή:288,0=όλα)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Πόσο εξονυχιστική να είναι η επιβεβαίωση του μπλοκ(0-4, προεπιλογή:3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation>Δεν ειναι αρκετες περιγραφες αρχείων διαθέσιμες.</translation>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>Ορίσμος του αριθμόυ θεματων στην υπηρεσία κλήσεων RPC (προεπιλογή: 4) </translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Επαλήθευση των μπλοκ... </translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Επαλήθευση πορτοφολιου... </translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation>Ορίσμος του αριθμό των νημάτων ελέγχου σεναρίου (μέχρι 16, 0 = auto, <0 = αφήνουν τους πολλους πυρήνες δωρεάν, default: 0)</translation>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Πληροφορία</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Διατηρήση ένος πλήρες ευρετήριου συναλλαγών (προεπιλογή: 0) </translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Μέγιστος buffer λήψης ανά σύνδεση, <n>*1000 bytes (προεπιλογή: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Μέγιστος buffer αποστολής ανά σύνδεση, <n>*1000 bytes (προεπιλογή: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Μονο αποδοχη αλυσίδας μπλοκ που ταιριάζει με τα ενσωματωμένα σημεία ελέγχου (προεπιλογή: 1) </translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation> Συνδέση μόνο σε κόμβους του δικτύου <net> (IPv4, IPv6 ή Tor) </translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Χρονοσφραγίδα πληροφοριών εντοπισμού σφαλμάτων</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Colossus Wiki for SSL setup instructions)</source>
<translation>Ρυθμίσεις SSL: (ανατρέξτε στο Colossus Wiki για οδηγίες ρυθμίσεων SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Επιλέξτε την έκδοση του διαμεσολαβητη για να χρησιμοποιήσετε (4-5 , προεπιλογή: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στην κονσόλα αντί του αρχείου debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στον debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Ορίσμος του μέγιστου μέγεθος block σε bytes (προεπιλογή: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Ορίστε το μέγιστο μέγεθος block σε bytes (προεπιλογή: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Συρρίκνωση του αρχείο debug.log κατα την εκκίνηση του πελάτη (προεπιλογή: 1 όταν δεν-debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation>Η υπογραφή συναλλαγής απέτυχε </translation>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Ορισμός λήξης χρονικού ορίου σε χιλιοστά του δευτερολέπτου(προεπιλογή:5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Λάθος Συστήματος:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation>Το ποσό της συναλλαγής είναι πολύ μικρο </translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation>Τα ποσά των συναλλαγών πρέπει να είναι θετικα</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation>Η συναλλαγή ειναι πολύ μεγάλη </translation>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:1)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Χρήση διακομιστή μεσολάβησης για την επίτευξη των Tor κρυμμένων υπηρεσιων (προεπιλογή: ίδιο με το-proxy) </translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Όνομα χρήστη για τις συνδέσεις JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Προειδοποίηση</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Προειδοποίηση: Αυτή η έκδοση είναι ξεπερασμένη, απαιτείται αναβάθμιση </translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Θα πρέπει να ξαναχτίστουν οι βάσεις δεδομένων που χρησιμοποιούντε-Αναδημιουργία αλλάγων-txindex </translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>Το αρχειο wallet.dat ειναι διεφθαρμένο, η διάσωση απέτυχε</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Κωδικός για τις συνδέσεις JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Αποδοχή συνδέσεων JSON-RPC από συγκεκριμένη διεύθυνση IP</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Αποστολή εντολών στον κόμβο <ip> (προεπιλογή: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Αναβάθμισε το πορτοφόλι στην τελευταία έκδοση</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Όριο πλήθους κλειδιών pool <n> (προεπιλογή: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Επανέλεγχος της αλυσίδας μπλοκ για απούσες συναλλαγές</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Χρήση του OpenSSL (https) για συνδέσεις JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Αρχείο πιστοποιητικού του διακομιστή (προεπιλογή: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Προσωπικό κλειδί του διακομιστή (προεπιλογή: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Αποδεκτά κρυπτογραφήματα (προεπιλογή: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Αυτό το κείμενο βοήθειας</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή (bind returned error %d, %s) </translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Σύνδεση μέσω διαμεσολαβητή socks</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Να επιτρέπονται οι έλεγχοι DNS για προσθήκη και σύνδεση κόμβων</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Φόρτωση διευθύνσεων...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Σφάλμα φόρτωσης wallet.dat: Κατεστραμμένο Πορτοφόλι</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Colossus</source>
<translation>Σφάλμα φόρτωσης wallet.dat: Το Πορτοφόλι απαιτεί μια νεότερη έκδοση του Colossus</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Colossus to complete</source>
<translation>Απαιτείται η επανεγγραφή του Πορτοφολιού, η οποία θα ολοκληρωθεί στην επανεκκίνηση του Colossus</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Σφάλμα φόρτωσης αρχείου wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Άγνωστo δίκτυο ορίζεται σε onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Άγνωστo δίκτυο ορίζεται: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Λάθος ποσότητα</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Ανεπαρκές κεφάλαιο</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Φόρτωση ευρετηρίου μπλοκ...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Προσέθεσε ένα κόμβο για σύνδεση και προσπάθησε να κρατήσεις την σύνδεση ανοιχτή</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Colossus is probably already running.</source><|fim▁hole|> <message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Αμοιβή ανά KB που θα προστίθεται στις συναλλαγές που στέλνεις</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Φόρτωση πορτοφολιού...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Δεν μπορώ να υποβαθμίσω το πορτοφόλι</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Ανίχνευση...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Η φόρτωση ολοκληρώθηκε</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Χρήση της %s επιλογής</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Σφάλμα</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Πρέπει να βάλεις ένα κωδικό στο αρχείο παραμέτρων: %s
Εάν το αρχείο δεν υπάρχει, δημιούργησε το με δικαιώματα μόνο για ανάγνωση από τον δημιουργό</translation>
</message>
</context>
</TS><|fim▁end|> | <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή. Το Colossus είναι πιθανώς ήδη ενεργό.</translation>
</message> |
<|file_name|>main-menu.js<|end_file_name|><|fim▁begin|>import {
domReady,
transitionFromClass,
transitionToClass,
readFileAsText
} from '../utils';
import Spinner from './spinner';
import { EventEmitter } from 'events';
export default class MainMenu extends EventEmitter {
constructor() {
super();
this.allowHide = false;
this._spinner = new Spinner();
domReady.then(() => {
this.container = document.querySelector('.main-menu');
this._loadFileInput = document.querySelector('.load-file-input');
this._pasteInput = document.querySelector('.paste-input');
this._loadDemoBtn = document.querySelector('.load-demo');
this._loadFileBtn = document.querySelector('.load-file');
this._pasteLabel = document.querySelector('.menu-input');
this._overlay = this.container.querySelector('.overlay');<|fim▁hole|> this._menu = this.container.querySelector('.menu');
document.querySelector('.menu-btn')
.addEventListener('click', e => this._onMenuButtonClick(e));
this._overlay.addEventListener('click', e => this._onOverlayClick(e));
this._loadFileBtn.addEventListener('click', e => this._onLoadFileClick(e));
this._loadDemoBtn.addEventListener('click', e => this._onLoadDemoClick(e));
this._loadFileInput.addEventListener('change', e => this._onFileInputChange(e));
this._pasteInput.addEventListener('input', e => this._onTextInputChange(e));
});
}
show() {
this.container.classList.remove('hidden');
transitionFromClass(this._overlay, 'hidden');
transitionFromClass(this._menu, 'hidden');
}
hide() {
if (!this.allowHide) return;
this.stopSpinner();
this.container.classList.add('hidden');
transitionToClass(this._overlay, 'hidden');
transitionToClass(this._menu, 'hidden');
}
stopSpinner() {
this._spinner.hide();
}
showFilePicker() {
this._loadFileInput.click();
}
_onOverlayClick(event) {
event.preventDefault();
this.hide();
}
_onMenuButtonClick(event) {
event.preventDefault();
this.show();
}
_onTextInputChange(event) {
const val = this._pasteInput.value.trim();
if (val.includes('</svg>')) {
this._pasteInput.value = '';
this._pasteInput.blur();
this._pasteLabel.appendChild(this._spinner.container);
this._spinner.show();
this.emit('svgDataLoad', {
data: val,
filename: 'image.svg'
});
}
}
_onLoadFileClick(event) {
event.preventDefault();
event.target.blur();
this.showFilePicker();
}
async _onFileInputChange(event) {
const file = this._loadFileInput.files[0];
if (!file) return;
this._loadFileBtn.appendChild(this._spinner.container);
this._spinner.show();
this.emit('svgDataLoad', {
data: await readFileAsText(file),
filename: file.name
});
}
async _onLoadDemoClick(event) {
event.preventDefault();
event.target.blur();
this._loadDemoBtn.appendChild(this._spinner.container);
this._spinner.show();
try {
this.emit('svgDataLoad', {
data: await fetch('test-svgs/car-lite.svg').then(r => r.text()),
filename: 'car-lite.svg'
});
}
catch (err) {
// This extra scope is working around a babel-minify bug.
// It's fixed in Babel 7.
{
this.stopSpinner();
let error;
if ('serviceWorker' in navigator && navigator.serviceWorker.controller) {
error = Error("Demo not available offline");
}
else {
error = Error("Couldn't fetch demo SVG");
}
this.emit('error', { error });
}
}
}
}<|fim▁end|> | |
<|file_name|>space_userpref.py<|end_file_name|><|fim▁begin|># ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Header, Menu, Panel
from bpy.app.translations import pgettext_iface as iface_
from bpy.app.translations import contexts as i18n_contexts
def opengl_lamp_buttons(column, lamp):
split = column.row()
split.prop(lamp, "use", text="", icon='OUTLINER_OB_LAMP' if lamp.use else 'LAMP_DATA')
col = split.column()
col.active = lamp.use
row = col.row()
row.label(text="Diffuse:")
row.prop(lamp, "diffuse_color", text="")
row = col.row()
row.label(text="Specular:")
row.prop(lamp, "specular_color", text="")
col = split.column()
col.active = lamp.use
col.prop(lamp, "direction", text="")
class USERPREF_HT_header(Header):
bl_space_type = 'USER_PREFERENCES'
def draw(self, context):
layout = self.layout
layout.template_header()
userpref = context.user_preferences
layout.operator_context = 'EXEC_AREA'
layout.operator("wm.save_userpref")
layout.operator_context = 'INVOKE_DEFAULT'
if userpref.active_section == 'INPUT':
layout.operator("wm.keyconfig_import")
layout.operator("wm.keyconfig_export")
elif userpref.active_section == 'ADDONS':
layout.operator("wm.addon_install", icon='FILESEL')
layout.operator("wm.addon_refresh", icon='FILE_REFRESH')
layout.menu("USERPREF_MT_addons_online_resources")
elif userpref.active_section == 'THEMES':
layout.operator("ui.reset_default_theme")
layout.operator("wm.theme_install")
class USERPREF_PT_tabs(Panel):
bl_label = ""
bl_space_type = 'USER_PREFERENCES'
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
def draw(self, context):
layout = self.layout
userpref = context.user_preferences
layout.prop(userpref, "active_section", expand=True)
class USERPREF_MT_interaction_presets(Menu):
bl_label = "Presets"
preset_subdir = "interaction"
preset_operator = "script.execute_preset"
draw = Menu.draw_preset
class USERPREF_MT_appconfigs(Menu):
bl_label = "AppPresets"
preset_subdir = "keyconfig"
preset_operator = "wm.appconfig_activate"
def draw(self, context):
self.layout.operator("wm.appconfig_default", text="Blender (default)")
# now draw the presets
Menu.draw_preset(self, context)
class USERPREF_MT_splash(Menu):
bl_label = "Splash"
def draw(self, context):
layout = self.layout
split = layout.split()
row = split.row()
row.label("")
row = split.row()
row.label("Interaction:")
text = bpy.path.display_name(context.window_manager.keyconfigs.active.name)
if not text:
text = "Blender (default)"
row.menu("USERPREF_MT_appconfigs", text=text)
# only for addons
class USERPREF_MT_splash_footer(Menu):
bl_label = ""
def draw(self, context):
pass
class USERPREF_PT_interface(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "Interface"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'INTERFACE')
def draw(self, context):
import sys
layout = self.layout
userpref = context.user_preferences
view = userpref.view
row = layout.row()
col = row.column()
col.label(text="Display:")
col.prop(view, "show_tooltips")
col.prop(view, "show_tooltips_python")
col.prop(view, "show_object_info", text="Object Info")
col.prop(view, "show_large_cursors")
col.prop(view, "show_view_name", text="View Name")
col.prop(view, "show_playback_fps", text="Playback FPS")
col.prop(view, "use_global_scene")
col.prop(view, "object_origin_size")
col.separator()
col.separator()
col.separator()
col.prop(view, "show_mini_axis", text="Display Mini Axis")
sub = col.column()
sub.active = view.show_mini_axis
sub.prop(view, "mini_axis_size", text="Size")
sub.prop(view, "mini_axis_brightness", text="Brightness")
col.separator()
if sys.platform[:3] == "win":
col.label("Warnings")
col.prop(view, "use_quit_dialog")
row.separator()
row.separator()
col = row.column()
col.label(text="View Manipulation:")
col.prop(view, "use_mouse_depth_cursor")
col.prop(view, "use_mouse_depth_navigate")
col.prop(view, "use_zoom_to_mouse")
col.prop(view, "use_rotate_around_active")
col.prop(view, "use_global_pivot")
col.prop(view, "use_camera_lock_parent")
col.separator()
col.prop(view, "use_auto_perspective")
col.prop(view, "smooth_view")
col.prop(view, "rotation_angle")
col.separator()
col.separator()
col.label(text="2D Viewports:")
col.prop(view, "view2d_grid_spacing_min", text="Minimum Grid Spacing")
col.prop(view, "timecode_style")
col.prop(view, "view_frame_type")
if (view.view_frame_type == 'SECONDS'):
col.prop(view, "view_frame_seconds")
elif (view.view_frame_type == 'KEYFRAMES'):
col.prop(view, "view_frame_keyframes")
row.separator()
row.separator()
col = row.column()
#Toolbox doesn't exist yet
#col.label(text="Toolbox:")
#col.prop(view, "show_column_layout")
#col.label(text="Open Toolbox Delay:")
#col.prop(view, "open_left_mouse_delay", text="Hold LMB")
#col.prop(view, "open_right_mouse_delay", text="Hold RMB")
col.prop(view, "show_manipulator")
sub = col.column()
sub.active = view.show_manipulator
sub.prop(view, "manipulator_size", text="Size")
sub.prop(view, "manipulator_handle_size", text="Handle Size")
sub.prop(view, "manipulator_hotspot", text="Hotspot")
col.separator()
col.separator()
col.separator()
col.label(text="Menus:")
col.prop(view, "use_mouse_over_open")
sub = col.column()
sub.active = view.use_mouse_over_open
sub.prop(view, "open_toplevel_delay", text="Top Level")
sub.prop(view, "open_sublevel_delay", text="Sub Level")
col.separator()
col.label(text="Pie Menus:")
sub = col.column(align=True)
sub.prop(view, "pie_animation_timeout")
sub.prop(view, "pie_initial_timeout")
sub.prop(view, "pie_menu_radius")
sub.prop(view, "pie_menu_threshold")
sub.prop(view, "pie_menu_confirm")
col.separator()
col.separator()
col.separator()
col.prop(view, "show_splash")
class USERPREF_PT_edit(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "Edit"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'EDITING')
def draw(self, context):
layout = self.layout
userpref = context.user_preferences
edit = userpref.edit
row = layout.row()
col = row.column()
col.label(text="Link Materials To:")
col.prop(edit, "material_link", text="")
col.separator()
col.separator()
col.separator()
col.label(text="New Objects:")
col.prop(edit, "use_enter_edit_mode")
col.label(text="Align To:")
col.prop(edit, "object_align", text="")
col.separator()
col.separator()
col.separator()
col.label(text="Undo:")
col.prop(edit, "use_global_undo")
col.prop(edit, "undo_steps", text="Steps")
col.prop(edit, "undo_memory_limit", text="Memory Limit")
row.separator()
row.separator()
col = row.column()
col.label(text="Grease Pencil:")
col.prop(edit, "grease_pencil_eraser_radius", text="Eraser Radius")
col.separator()
col.prop(edit, "grease_pencil_manhattan_distance", text="Manhattan Distance")
col.prop(edit, "grease_pencil_euclidean_distance", text="Euclidean Distance")
col.separator()
col.prop(edit, "grease_pencil_default_color", text="Default Color")
col.separator()
col.prop(edit, "use_grease_pencil_simplify_stroke", text="Simplify Stroke")
col.separator()
col.separator()
col.separator()
col.separator()
col.label(text="Playback:")
col.prop(edit, "use_negative_frames")
col.separator()
col.separator()
col.separator()
col.label(text="Node Editor:")
col.prop(edit, "node_margin")
col.label(text="Animation Editors:")
col.prop(edit, "fcurve_unselected_alpha", text="F-Curve Visibility")<|fim▁hole|> row.separator()
row.separator()
col = row.column()
col.label(text="Keyframing:")
col.prop(edit, "use_visual_keying")
col.prop(edit, "use_keyframe_insert_needed", text="Only Insert Needed")
col.separator()
col.prop(edit, "use_auto_keying", text="Auto Keyframing:")
col.prop(edit, "use_auto_keying_warning")
sub = col.column()
#~ sub.active = edit.use_keyframe_insert_auto # incorrect, time-line can enable
sub.prop(edit, "use_keyframe_insert_available", text="Only Insert Available")
col.separator()
col.label(text="New F-Curve Defaults:")
col.prop(edit, "keyframe_new_interpolation_type", text="Interpolation")
col.prop(edit, "keyframe_new_handle_type", text="Handles")
col.prop(edit, "use_insertkey_xyz_to_rgb", text="XYZ to RGB")
col.separator()
col.separator()
col.separator()
col.label(text="Transform:")
col.prop(edit, "use_drag_immediately")
row.separator()
row.separator()
col = row.column()
col.prop(edit, "sculpt_paint_overlay_color", text="Sculpt Overlay Color")
col.separator()
col.separator()
col.separator()
col.label(text="Duplicate Data:")
col.prop(edit, "use_duplicate_mesh", text="Mesh")
col.prop(edit, "use_duplicate_surface", text="Surface")
col.prop(edit, "use_duplicate_curve", text="Curve")
col.prop(edit, "use_duplicate_text", text="Text")
col.prop(edit, "use_duplicate_metaball", text="Metaball")
col.prop(edit, "use_duplicate_armature", text="Armature")
col.prop(edit, "use_duplicate_lamp", text="Lamp")
col.prop(edit, "use_duplicate_material", text="Material")
col.prop(edit, "use_duplicate_texture", text="Texture")
#col.prop(edit, "use_duplicate_fcurve", text="F-Curve")
col.prop(edit, "use_duplicate_action", text="Action")
col.prop(edit, "use_duplicate_particle", text="Particle")
class USERPREF_PT_system(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "System"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'SYSTEM')
def draw(self, context):
import sys
layout = self.layout
userpref = context.user_preferences
system = userpref.system
split = layout.split()
# 1. Column
column = split.column()
colsplit = column.split(percentage=0.85)
col = colsplit.column()
col.label(text="General:")
col.prop(system, "dpi")
col.label("Virtual Pixel Mode:")
col.prop(system, "virtual_pixel_mode", text="")
col.separator()
col.prop(system, "frame_server_port")
col.prop(system, "scrollback", text="Console Scrollback")
col.separator()
col.label(text="Sound:")
col.row().prop(system, "audio_device", expand=False)
sub = col.column()
sub.active = system.audio_device != 'NONE' and system.audio_device != 'Null'
#sub.prop(system, "use_preview_images")
sub.prop(system, "audio_channels", text="Channels")
sub.prop(system, "audio_mixing_buffer", text="Mixing Buffer")
sub.prop(system, "audio_sample_rate", text="Sample Rate")
sub.prop(system, "audio_sample_format", text="Sample Format")
col.separator()
col.label(text="Screencast:")
col.prop(system, "screencast_fps")
col.prop(system, "screencast_wait_time")
col.separator()
if userpref.addons.find('cycles') != -1:
userpref.addons['cycles'].preferences.draw_impl(col, context)
if hasattr(system, "opensubdiv_compute_type"):
col.label(text="OpenSubdiv compute:")
col.row().prop(system, "opensubdiv_compute_type", text="")
# 2. Column
column = split.column()
colsplit = column.split(percentage=0.85)
col = colsplit.column()
col.label(text="OpenGL:")
col.prop(system, "gl_clip_alpha", slider=True)
col.prop(system, "use_mipmaps")
col.prop(system, "use_gpu_mipmap")
col.prop(system, "use_16bit_textures")
col.separator()
col.label(text="Selection")
col.prop(system, "select_method", text="")
col.separator()
col.label(text="Anisotropic Filtering")
col.prop(system, "anisotropic_filter", text="")
col.separator()
col.label(text="Window Draw Method:")
col.prop(system, "window_draw_method", text="")
col.prop(system, "multi_sample", text="")
if sys.platform == "linux" and system.multi_sample != 'NONE':
col.label(text="Might fail for Mesh editing selection!")
col.separator()
col.prop(system, "use_region_overlap")
col.separator()
col.label(text="Text Draw Options:")
col.prop(system, "use_text_antialiasing")
col.separator()
col.label(text="Textures:")
col.prop(system, "gl_texture_limit", text="Limit Size")
col.prop(system, "texture_time_out", text="Time Out")
col.prop(system, "texture_collection_rate", text="Collection Rate")
col.separator()
col.label(text="Images Draw Method:")
col.prop(system, "image_draw_method", text="")
col.separator()
col.label(text="Sequencer/Clip Editor:")
# currently disabled in the code
# col.prop(system, "prefetch_frames")
col.prop(system, "memory_cache_limit")
# 3. Column
column = split.column()
column.label(text="Solid OpenGL lights:")
split = column.split(percentage=0.1)
split.label()
split.label(text="Colors:")
split.label(text="Direction:")
lamp = system.solid_lights[0]
opengl_lamp_buttons(column, lamp)
lamp = system.solid_lights[1]
opengl_lamp_buttons(column, lamp)
lamp = system.solid_lights[2]
opengl_lamp_buttons(column, lamp)
column.separator()
column.label(text="Color Picker Type:")
column.row().prop(system, "color_picker_type", text="")
column.separator()
column.prop(system, "use_weight_color_range", text="Custom Weight Paint Range")
sub = column.column()
sub.active = system.use_weight_color_range
sub.template_color_ramp(system, "weight_color_range", expand=True)
column.separator()
column.prop(system, "font_path_ui")
column.prop(system, "font_path_ui_mono")
if bpy.app.build_options.international:
column.prop(system, "use_international_fonts")
if system.use_international_fonts:
column.prop(system, "language")
row = column.row()
row.label(text="Translate:", text_ctxt=i18n_contexts.id_windowmanager)
row = column.row(align=True)
row.prop(system, "use_translate_interface", text="Interface", toggle=True)
row.prop(system, "use_translate_tooltips", text="Tooltips", toggle=True)
row.prop(system, "use_translate_new_dataname", text="New Data", toggle=True)
class USERPREF_MT_interface_theme_presets(Menu):
    """Preset menu listing saved interface themes (stored as XML presets)."""
    bl_label = "Presets"
    preset_subdir = "interface_theme"
    preset_operator = "script.execute_preset"
    preset_type = 'XML'
    # Maps each XML preset root element to the RNA data-path it applies to.
    preset_xml_map = (
        ("user_preferences.themes[0]", "Theme"),
        ("user_preferences.ui_styles[0]", "ThemeStyle"),
    )
    # Reuse the generic preset-menu draw function.
    draw = Menu.draw_preset
class USERPREF_PT_theme(Panel):
    """User-preferences panel for editing UI themes (shown when the
    'Themes' section is active)."""
    bl_space_type = 'USER_PREFERENCES'
    bl_label = "Themes"
    bl_region_type = 'WINDOW'
    bl_options = {'HIDE_HEADER'}

    # not essential, hard-coded UI delimiters for the theme layout
    # (after each listed property identifier a blank separator row is drawn)
    ui_delimiters = {
        'VIEW_3D': {
            "text_grease_pencil",
            "text_keyframe",
            "speaker",
            "freestyle_face_mark",
            "split_normal",
            "bone_solid",
            "paint_curve_pivot",
        },
        'GRAPH_EDITOR': {
            "handle_vertex_select",
        },
        'IMAGE_EDITOR': {
            "paint_curve_pivot",
        },
        'NODE_EDITOR': {
            "layout_node",
        },
        'CLIP_EDITOR': {
            "handle_vertex_select",
        }
    }

    @staticmethod
    def _theme_generic(split, themedata, theme_area):
        """Auto-generate a two-column property layout for an arbitrary theme
        struct by walking its RNA properties, recursing into POINTER
        sub-structs.

        :arg split: parent layout to draw into.
        :arg themedata: RNA theme struct whose properties are drawn.
        :arg theme_area: key into ``ui_delimiters`` for optional separators.
        """
        col = split.column()

        def theme_generic_recurse(data):
            # Section heading: the RNA type's UI name.
            col.label(data.rna_type.name)
            row = col.row()
            subsplit = row.split(percentage=0.95)

            padding1 = subsplit.split(percentage=0.15)
            padding1.column()

            subsplit = row.split(percentage=0.85)

            padding2 = subsplit.split(percentage=0.15)
            padding2.column()

            # Left/right columns; properties alternate between the two.
            colsub_pair = padding1.column(), padding2.column()

            # Group properties by (type, subtype) so they sort consistently.
            props_type = {}

            for i, prop in enumerate(data.rna_type.properties):
                if prop.identifier == "rna_type":
                    continue

                props_type.setdefault((prop.type, prop.subtype), []).append(prop)

            th_delimiters = USERPREF_PT_theme.ui_delimiters.get(theme_area)
            for props_type, props_ls in sorted(props_type.items()):
                if props_type[0] == 'POINTER':
                    # Nested theme struct: recurse instead of drawing directly.
                    for i, prop in enumerate(props_ls):
                        theme_generic_recurse(getattr(data, prop.identifier))
                else:
                    if th_delimiters is None:
                        # simple, no delimiters
                        for i, prop in enumerate(props_ls):
                            colsub_pair[i % 2].row().prop(data, prop.identifier)
                    else:
                        # add hard coded delimiters
                        i = 0
                        for prop in props_ls:
                            colsub = colsub_pair[i]
                            colsub.row().prop(data, prop.identifier)
                            i = (i + 1) % 2
                            if prop.identifier in th_delimiters:
                                # Pad the odd column then emit a blank row in
                                # both columns so the next group starts level.
                                if i:
                                    colsub = colsub_pair[1]
                                    colsub.row().label("")
                                colsub_pair[0].row().label("")
                                colsub_pair[1].row().label("")
                                i = 0

        theme_generic_recurse(themedata)

    @staticmethod
    def _theme_widget_style(layout, widget_style):
        """Draw the color/shading options of a single widget color set
        (outline, item, inner, text colors plus optional top/bottom shading)."""
        row = layout.row()
        subsplit = row.split(percentage=0.95)

        padding = subsplit.split(percentage=0.15)
        colsub = padding.column()
        colsub = padding.column()  # second column of the split is the one drawn into
        colsub.row().prop(widget_style, "outline")
        colsub.row().prop(widget_style, "item", slider=True)
        colsub.row().prop(widget_style, "inner", slider=True)
        colsub.row().prop(widget_style, "inner_sel", slider=True)

        subsplit = row.split(percentage=0.85)

        padding = subsplit.split(percentage=0.15)
        colsub = padding.column()
        colsub = padding.column()
        colsub.row().prop(widget_style, "text")
        colsub.row().prop(widget_style, "text_sel")
        colsub.prop(widget_style, "show_shaded")

        # Shading sliders are greyed out unless shading is enabled.
        subsub = colsub.column(align=True)
        subsub.active = widget_style.show_shaded
        subsub.prop(widget_style, "shadetop")
        subsub.prop(widget_style, "shadedown")

        layout.separator()

    @staticmethod
    def _ui_font_style(layout, font_style):
        """Draw the options of a single UI font style (kerning, size,
        shadow offset and shadow color/blur settings)."""
        split = layout.split()

        col = split.column()
        col.label(text="Kerning Style:")
        col.row().prop(font_style, "font_kerning_style", expand=True)
        col.prop(font_style, "points")

        col = split.column()
        col.label(text="Shadow Offset:")
        col.prop(font_style, "shadow_offset_x", text="X")
        col.prop(font_style, "shadow_offset_y", text="Y")

        col = split.column()
        col.prop(font_style, "shadow")
        col.prop(font_style, "shadow_alpha")
        col.prop(font_style, "shadow_value")

        layout.separator()

    @classmethod
    def poll(cls, context):
        # Only visible when the 'Themes' preferences section is selected.
        userpref = context.user_preferences
        return (userpref.active_section == 'THEMES')

    def draw(self, context):
        layout = self.layout

        theme = context.user_preferences.themes[0]

        split_themes = layout.split(percentage=0.2)

        # Left column: preset selector and theme-area list.
        sub = split_themes.column()

        sub.label(text="Presets:")
        subrow = sub.row(align=True)

        subrow.menu("USERPREF_MT_interface_theme_presets", text=USERPREF_MT_interface_theme_presets.bl_label)
        subrow.operator("wm.interface_theme_preset_add", text="", icon='ZOOMIN')
        subrow.operator("wm.interface_theme_preset_add", text="", icon='ZOOMOUT').remove_active = True
        sub.separator()

        sub.prop(theme, "theme_area", expand=True)

        # NOTE(review): this split is immediately overwritten below and never
        # used — looks like dead layout code; confirm before removing.
        split = layout.split(percentage=0.4)

        layout.separator()
        layout.separator()

        split = split_themes.split()

        if theme.theme_area == 'USER_INTERFACE':
            # Widget color sets, one sub-panel per widget kind.
            col = split.column()
            ui = theme.user_interface

            col.label(text="Regular:")
            self._theme_widget_style(col, ui.wcol_regular)

            col.label(text="Tool:")
            self._theme_widget_style(col, ui.wcol_tool)

            col.label(text="Radio Buttons:")
            self._theme_widget_style(col, ui.wcol_radio)

            col.label(text="Text:")
            self._theme_widget_style(col, ui.wcol_text)

            col.label(text="Option:")
            self._theme_widget_style(col, ui.wcol_option)

            col.label(text="Toggle:")
            self._theme_widget_style(col, ui.wcol_toggle)

            col.label(text="Number Field:")
            self._theme_widget_style(col, ui.wcol_num)

            col.label(text="Value Slider:")
            self._theme_widget_style(col, ui.wcol_numslider)

            col.label(text="Box:")
            self._theme_widget_style(col, ui.wcol_box)

            col.label(text="Menu:")
            self._theme_widget_style(col, ui.wcol_menu)

            col.label(text="Pie Menu:")
            self._theme_widget_style(col, ui.wcol_pie_menu)

            col.label(text="Pulldown:")
            self._theme_widget_style(col, ui.wcol_pulldown)

            col.label(text="Menu Back:")
            self._theme_widget_style(col, ui.wcol_menu_back)

            col.label(text="Tooltip:")
            self._theme_widget_style(col, ui.wcol_tooltip)

            col.label(text="Menu Item:")
            self._theme_widget_style(col, ui.wcol_menu_item)

            col.label(text="Scroll Bar:")
            self._theme_widget_style(col, ui.wcol_scroll)

            col.label(text="Progress Bar:")
            self._theme_widget_style(col, ui.wcol_progress)

            col.label(text="List Item:")
            self._theme_widget_style(col, ui.wcol_list_item)

            # Colors for widget states (animated / driven / keyed).
            ui_state = theme.user_interface.wcol_state
            col.label(text="State:")

            row = col.row()
            subsplit = row.split(percentage=0.95)

            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui_state, "inner_anim")
            colsub.row().prop(ui_state, "inner_anim_sel")
            colsub.row().prop(ui_state, "inner_driven")
            colsub.row().prop(ui_state, "inner_driven_sel")

            subsplit = row.split(percentage=0.85)

            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui_state, "inner_key")
            colsub.row().prop(ui_state, "inner_key_sel")
            colsub.row().prop(ui_state, "blend")

            col.separator()
            col.separator()

            # Misc style values (menu shadow, icon alpha, emboss color).
            col.label("Styles:")

            row = col.row()
            subsplit = row.split(percentage=0.95)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui, "menu_shadow_fac")

            subsplit = row.split(percentage=0.85)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui, "menu_shadow_width")

            row = col.row()
            subsplit = row.split(percentage=0.95)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui, "icon_alpha")

            subsplit = row.split(percentage=0.85)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui, "widget_emboss")

            col.separator()
            col.separator()

            col.label("Axis Colors:")

            row = col.row()
            subsplit = row.split(percentage=0.95)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()
            colsub.row().prop(ui, "axis_x")
            colsub.row().prop(ui, "axis_y")
            colsub.row().prop(ui, "axis_z")

            # Right half intentionally left empty to balance the layout.
            subsplit = row.split(percentage=0.85)
            padding = subsplit.split(percentage=0.15)
            colsub = padding.column()
            colsub = padding.column()

            layout.separator()
            layout.separator()
        elif theme.theme_area == 'BONE_COLOR_SETS':
            col = split.column()

            for i, ui in enumerate(theme.bone_color_sets):
                col.label(text=iface_("Color Set %d:") % (i + 1), translate=False)  # i starts from 0

                row = col.row()
                subsplit = row.split(percentage=0.95)
                padding = subsplit.split(percentage=0.15)
                colsub = padding.column()
                colsub = padding.column()
                colsub.row().prop(ui, "normal")
                colsub.row().prop(ui, "select")
                colsub.row().prop(ui, "active")

                subsplit = row.split(percentage=0.85)
                padding = subsplit.split(percentage=0.15)
                colsub = padding.column()
                colsub = padding.column()
                colsub.row().prop(ui, "show_colored_constraints")
        elif theme.theme_area == 'STYLE':
            col = split.column()

            style = context.user_preferences.ui_styles[0]

            col.label(text="Panel Title:")
            self._ui_font_style(col, style.panel_title)

            col.separator()

            col.label(text="Widget:")
            self._ui_font_style(col, style.widget)

            col.separator()

            col.label(text="Widget Label:")
            self._ui_font_style(col, style.widget_label)
        else:
            # All other theme areas are generated generically from RNA.
            self._theme_generic(split, getattr(theme, theme.theme_area.lower()), theme.theme_area)
class USERPREF_PT_file(Panel):
    """User-preferences panel for file paths and save/load options
    (shown when the 'Files' section is active)."""
    bl_space_type = 'USER_PREFERENCES'
    bl_label = "Files"
    bl_region_type = 'WINDOW'
    bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        # Only visible when the 'Files' preferences section is selected.
        userpref = context.user_preferences
        return (userpref.active_section == 'FILES')

    def draw(self, context):
        layout = self.layout

        userpref = context.user_preferences
        paths = userpref.filepaths
        system = userpref.system

        split = layout.split(percentage=0.7)

        # Left column: the directory path fields, labels and values aligned
        # in two sub-columns.
        col = split.column()
        col.label(text="File Paths:")

        colsplit = col.split(percentage=0.95)
        col1 = colsplit.split(percentage=0.3)

        sub = col1.column()
        sub.label(text="Fonts:")
        sub.label(text="Textures:")
        sub.label(text="Render Output:")
        sub.label(text="Scripts:")
        sub.label(text="Sounds:")
        sub.label(text="Temp:")
        sub.label(text="Render Cache:")
        sub.label(text="I18n Branches:")
        sub.label(text="Image Editor:")
        sub.label(text="Animation Player:")

        sub = col1.column()
        sub.prop(paths, "font_directory", text="")
        sub.prop(paths, "texture_directory", text="")
        sub.prop(paths, "render_output_directory", text="")
        sub.prop(paths, "script_directory", text="")
        sub.prop(paths, "sound_directory", text="")
        sub.prop(paths, "temporary_directory", text="")
        sub.prop(paths, "render_cache_directory", text="")
        sub.prop(paths, "i18n_branches_directory", text="")
        sub.prop(paths, "image_editor", text="")
        subsplit = sub.split(percentage=0.3)
        subsplit.prop(paths, "animation_player_preset", text="")
        subsplit.prop(paths, "animation_player", text="")

        col.separator()
        col.separator()

        # Script auto-execution toggle plus the excluded-paths list.
        colsplit = col.split(percentage=0.95)
        sub = colsplit.column()

        row = sub.split(percentage=0.3)
        row.label(text="Auto Execution:")
        row.prop(system, "use_scripts_auto_execute")

        if system.use_scripts_auto_execute:
            box = sub.box()
            row = box.row()
            row.label(text="Excluded Paths:")
            row.operator("wm.userpref_autoexec_path_add", text="", icon='ZOOMIN', emboss=False)
            for i, path_cmp in enumerate(userpref.autoexec_paths):
                row = box.row()
                row.prop(path_cmp, "path", text="")
                row.prop(path_cmp, "use_glob", text="", icon='FILTER')
                row.operator("wm.userpref_autoexec_path_remove", text="", icon='X', emboss=False).index = i

        # Right column: save/load, auto-save and text-editor options.
        col = split.column()
        col.label(text="Save & Load:")
        col.prop(paths, "use_relative_paths")
        col.prop(paths, "use_file_compression")
        col.prop(paths, "use_load_ui")
        col.prop(paths, "use_filter_files")
        col.prop(paths, "show_hidden_files_datablocks")
        col.prop(paths, "hide_recent_locations")
        col.prop(paths, "hide_system_bookmarks")
        col.prop(paths, "show_thumbnails")

        col.separator()

        col.prop(paths, "save_version")
        col.prop(paths, "recent_files")
        col.prop(paths, "use_save_preview_images")

        col.separator()

        col.label(text="Auto Save:")
        col.prop(paths, "use_keep_session")
        col.prop(paths, "use_auto_save_temporary_files")
        # Timer field is greyed out unless auto-save is enabled.
        sub = col.column()
        sub.active = paths.use_auto_save_temporary_files
        sub.prop(paths, "auto_save_time", text="Timer (mins)")

        col.separator()

        col.label(text="Text Editor:")
        col.prop(system, "use_tabs_as_spaces")

        colsplit = col.split(percentage=0.95)
        col1 = colsplit.split(percentage=0.3)

        sub = col1.column()
        sub.label(text="Author:")
        sub = col1.column()
        sub.prop(system, "author", text="")
class USERPREF_MT_ndof_settings(Menu):
    """Popup menu with 3D-mouse (NDOF) settings; extra orbit/fly options
    are shown only when invoked from a 3D View."""
    # accessed from the window key-bindings in C (only)
    bl_label = "3D Mouse Settings"

    def draw(self, context):
        layout = self.layout

        input_prefs = context.user_preferences.inputs

        # Extra options below only make sense inside a 3D viewport.
        is_view3d = context.space_data.type == 'VIEW_3D'

        layout.prop(input_prefs, "ndof_sensitivity")
        layout.prop(input_prefs, "ndof_orbit_sensitivity")
        layout.prop(input_prefs, "ndof_deadzone")

        if is_view3d:
            layout.separator()
            layout.prop(input_prefs, "ndof_show_guide")

            layout.separator()
            layout.label(text="Orbit style")
            layout.row().prop(input_prefs, "ndof_view_navigate_method", text="")
            layout.row().prop(input_prefs, "ndof_view_rotate_method", text="")

            layout.separator()
            layout.label(text="Orbit options")
            layout.prop(input_prefs, "ndof_rotx_invert_axis")
            layout.prop(input_prefs, "ndof_roty_invert_axis")
            layout.prop(input_prefs, "ndof_rotz_invert_axis")

        # view2d use pan/zoom
        layout.separator()
        layout.label(text="Pan options")
        layout.prop(input_prefs, "ndof_panx_invert_axis")
        layout.prop(input_prefs, "ndof_pany_invert_axis")
        layout.prop(input_prefs, "ndof_panz_invert_axis")
        layout.prop(input_prefs, "ndof_pan_yz_swap_axis")

        layout.label(text="Zoom options")
        layout.prop(input_prefs, "ndof_zoom_invert")

        if is_view3d:
            layout.separator()
            layout.label(text="Fly/Walk options")
            layout.prop(input_prefs, "ndof_fly_helicopter", icon='NDOF_FLY')
            layout.prop(input_prefs, "ndof_lock_horizon", icon='NDOF_DOM')
class USERPREF_MT_keyconfigs(Menu):
    """Menu of key-configuration presets, with the built-in Blender default
    listed first, then the saved presets."""
    bl_label = "KeyPresets"
    preset_subdir = "keyconfig"
    preset_operator = "wm.keyconfig_activate"

    def draw(self, context):
        # First entry: restore the built-in default key configuration.
        props = self.layout.operator("wm.context_set_value", text="Blender (default)")
        props.data_path = "window_manager.keyconfigs.active"
        props.value = "context.window_manager.keyconfigs.default"

        # now draw the presets
        Menu.draw_preset(self, context)
class USERPREF_PT_input(Panel):
    """User-preferences panel for input settings and key maps
    (shown when the 'Input' section is active)."""
    bl_space_type = 'USER_PREFERENCES'
    bl_label = "Input"
    bl_region_type = 'WINDOW'
    bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        # Only visible when the 'Input' preferences section is selected.
        userpref = context.user_preferences
        return (userpref.active_section == 'INPUT')

    @staticmethod
    def draw_input_prefs(inputs, layout):
        """Draw the left-hand column of input preferences (mouse, orbit/zoom
        style, navigation and NDOF settings).

        :arg inputs: ``user_preferences.inputs`` RNA struct.
        :arg layout: layout to draw into.
        """
        import sys

        # General settings
        row = layout.row()
        col = row.column()

        sub = col.column()
        sub.label(text="Presets:")
        subrow = sub.row(align=True)

        subrow.menu("USERPREF_MT_interaction_presets", text=bpy.types.USERPREF_MT_interaction_presets.bl_label)
        subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMIN')
        subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMOUT').remove_active = True
        sub.separator()

        sub.label(text="Mouse:")
        # 3-button emulation is only meaningful with right-click select.
        sub1 = sub.column()
        sub1.active = (inputs.select_mouse == 'RIGHT')
        sub1.prop(inputs, "use_mouse_emulate_3_button")
        sub.prop(inputs, "use_mouse_continuous")
        sub.prop(inputs, "drag_threshold")
        sub.prop(inputs, "tweak_threshold")

        sub.label(text="Select With:")
        sub.row().prop(inputs, "select_mouse", expand=True)

        sub = col.column()
        sub.label(text="Double Click:")
        sub.prop(inputs, "mouse_double_click_time", text="Speed")

        sub.separator()

        sub.prop(inputs, "use_emulate_numpad")

        sub.separator()

        sub.label(text="Orbit Style:")
        sub.row().prop(inputs, "view_rotate_method", expand=True)

        sub.separator()

        sub.label(text="Zoom Style:")
        sub.row().prop(inputs, "view_zoom_method", text="")
        if inputs.view_zoom_method in {'DOLLY', 'CONTINUE'}:
            sub.row().prop(inputs, "view_zoom_axis", expand=True)
            sub.prop(inputs, "invert_mouse_zoom", text="Invert Mouse Zoom Direction")

        #sub.prop(inputs, "use_mouse_mmb_paste")

        #col.separator()

        sub = col.column()
        sub.prop(inputs, "invert_zoom_wheel", text="Invert Wheel Zoom Direction")
        #sub.prop(view, "wheel_scroll_lines", text="Scroll Lines")

        # Trackpad direction toggle exists only on macOS builds.
        if sys.platform == "darwin":
            sub = col.column()
            sub.prop(inputs, "use_trackpad_natural", text="Natural Trackpad Direction")

        col.separator()
        sub = col.column()
        sub.label(text="View Navigation:")
        sub.row().prop(inputs, "navigation_mode", expand=True)
        if inputs.navigation_mode == 'WALK':
            walk = inputs.walk_navigation

            sub.prop(walk, "use_mouse_reverse")
            sub.prop(walk, "mouse_speed")
            sub.prop(walk, "teleport_time")

            sub = col.column(align=True)
            sub.prop(walk, "walk_speed")
            sub.prop(walk, "walk_speed_factor")

            sub.separator()
            sub.prop(walk, "use_gravity")
            # Height/jump settings only apply while gravity is on.
            sub = col.column(align=True)
            sub.active = walk.use_gravity
            sub.prop(walk, "view_height")
            sub.prop(walk, "jump_height")

        if inputs.use_ndof:
            col.separator()
            col.label(text="NDOF Device:")
            sub = col.column(align=True)
            sub.prop(inputs, "ndof_sensitivity", text="NDOF Sensitivity")
            sub.prop(inputs, "ndof_orbit_sensitivity", text="NDOF Orbit Sensitivity")
            sub.prop(inputs, "ndof_deadzone", text="NDOF Deadzone")

            sub = col.column(align=True)
            sub.row().prop(inputs, "ndof_view_navigate_method", expand=True)
            sub.row().prop(inputs, "ndof_view_rotate_method", expand=True)

        row.separator()

    def draw(self, context):
        from rna_keymap_ui import draw_keymaps

        layout = self.layout

        #import time
        #start = time.time()

        userpref = context.user_preferences

        inputs = userpref.inputs

        split = layout.split(percentage=0.25)

        # Input settings
        self.draw_input_prefs(inputs, split)

        # Keymap Settings
        draw_keymaps(context, split)

        #print("runtime", time.time() - start)
class USERPREF_MT_addons_online_resources(Menu):
    """Menu of web links related to add-on development and sharing."""
    bl_label = "Online Resources"

    # menu to open web-pages with addons development guides
    def draw(self, context):
        layout = self.layout

        layout.operator(
            "wm.url_open", text="Add-ons Catalog", icon='URL',
        ).url = "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts"

        layout.separator()

        layout.operator(
            "wm.url_open", text="How to share your add-on", icon='URL',
        ).url = "http://wiki.blender.org/index.php/Dev:Py/Sharing"
        layout.operator(
            "wm.url_open", text="Add-on Guidelines", icon='URL',
        ).url = "http://wiki.blender.org/index.php/Dev:2.5/Py/Scripts/Guidelines/Addons"
        layout.operator(
            "wm.url_open", text="API Concepts", icon='URL',
        ).url = bpy.types.WM_OT_doc_view._prefix + "/info_quickstart.html"
        layout.operator(
            "wm.url_open", text="Add-on Tutorial", icon='URL',
        ).url = "http://www.blender.org/api/blender_python_api_current/info_tutorial_addon.html"
class USERPREF_PT_addons(Panel):
    """User-preferences panel listing installed add-ons with search,
    category and support-level filtering (shown when the 'Add-ons'
    section is active)."""
    bl_space_type = 'USER_PREFERENCES'
    bl_label = "Add-ons"
    bl_region_type = 'WINDOW'
    bl_options = {'HIDE_HEADER'}

    # Icon shown next to each add-on for its declared support level.
    _support_icon_mapping = {
        'OFFICIAL': 'FILE_BLEND',
        'COMMUNITY': 'POSE_DATA',
        'TESTING': 'MOD_EXPLODE',
    }

    @classmethod
    def poll(cls, context):
        # Only visible when the 'Add-ons' preferences section is selected.
        userpref = context.user_preferences
        return (userpref.active_section == 'ADDONS')

    @staticmethod
    def is_user_addon(mod, user_addon_paths):
        """Return True when module ``mod`` lives under a user-writable
        add-ons directory.

        ``user_addon_paths`` is a caller-owned cache list; it is filled on
        first call and reused on subsequent calls.
        """
        import os

        if not user_addon_paths:
            for path in (bpy.utils.script_path_user(),
                         bpy.utils.script_path_pref()):
                if path is not None:
                    user_addon_paths.append(os.path.join(path, "addons"))

        for path in user_addon_paths:
            if bpy.path.is_subdir(mod.__file__, path):
                return True
        return False

    @staticmethod
    def draw_error(layout, message):
        """Draw a multi-line error ``message`` inside a box; the first line
        gets an error icon, remaining lines are plain labels."""
        lines = message.split("\n")
        box = layout.box()
        sub = box.row()
        sub.label(lines[0])
        sub.label(icon='ERROR')
        for l in lines[1:]:
            box.label(l)

    def draw(self, context):
        import os
        import addon_utils

        layout = self.layout

        userpref = context.user_preferences
        # Module names of the currently-enabled add-ons.
        used_ext = {ext.module for ext in userpref.addons}

        userpref_addons_folder = os.path.join(userpref.filepaths.script_directory, "addons")
        scripts_addons_folder = bpy.utils.user_resource('SCRIPTS', "addons")

        # collect the categories that can be filtered on
        addons = [(mod, addon_utils.module_bl_info(mod)) for mod in addon_utils.modules(refresh=False)]

        split = layout.split(percentage=0.2)
        col = split.column()
        col.prop(context.window_manager, "addon_search", text="", icon='VIEWZOOM')

        col.label(text="Supported Level")
        col.prop(context.window_manager, "addon_support", expand=True)

        col.label(text="Categories")
        col.prop(context.window_manager, "addon_filter", expand=True)

        col = split.column()

        # set in addon_utils.modules_refresh()
        if addon_utils.error_duplicates:
            self.draw_error(col,
                            "Multiple addons using the same name found!\n"
                            "likely a problem with the script search path.\n"
                            "(see console for details)",
                            )

        if addon_utils.error_encoding:
            self.draw_error(col,
                            "One or more addons do not have UTF-8 encoding\n"
                            "(see console for details)",
                            )

        filter = context.window_manager.addon_filter
        search = context.window_manager.addon_search.lower()
        support = context.window_manager.addon_support

        # initialized on demand
        user_addon_paths = []

        for mod, info in addons:
            module_name = mod.__name__

            is_enabled = module_name in used_ext

            if info["support"] not in support:
                continue

            # check if addon should be visible with current filters
            if ((filter == "All") or
                    (filter == info["category"]) or
                    (filter == "Enabled" and is_enabled) or
                    (filter == "Disabled" and not is_enabled) or
                    (filter == "User" and (mod.__file__.startswith((scripts_addons_folder, userpref_addons_folder))))
                    ):
                # Search text matches against add-on name or author.
                if search and search not in info["name"].lower():
                    if info["author"]:
                        if search not in info["author"].lower():
                            continue
                    else:
                        continue

                # Addon UI Code
                col_box = col.column()
                box = col_box.box()
                colsub = box.column()
                row = colsub.row(align=True)

                # Expand/collapse arrow.
                row.operator(
                    "wm.addon_expand",
                    icon='TRIA_DOWN' if info["show_expanded"] else 'TRIA_RIGHT',
                    emboss=False,
                ).module = module_name

                # Enable/disable checkbox.
                row.operator(
                    "wm.addon_disable" if is_enabled else "wm.addon_enable",
                    icon='CHECKBOX_HLT' if is_enabled else 'CHECKBOX_DEHLT', text="",
                    emboss=False,
                ).module = module_name

                sub = row.row()
                sub.active = is_enabled
                sub.label(text='%s: %s' % (info["category"], info["name"]))
                if info["warning"]:
                    sub.label(icon='ERROR')

                # icon showing support level.
                sub.label(icon=self._support_icon_mapping.get(info["support"], 'QUESTION'))

                # Expanded UI (only if additional info is available)
                if info["show_expanded"]:
                    if info["description"]:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Description:")
                        split.label(text=info["description"])
                    if info["location"]:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Location:")
                        split.label(text=info["location"])
                    if mod:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="File:")
                        split.label(text=mod.__file__, translate=False)
                    if info["author"]:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Author:")
                        split.label(text=info["author"], translate=False)
                    if info["version"]:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Version:")
                        split.label(text='.'.join(str(x) for x in info["version"]), translate=False)
                    if info["warning"]:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Warning:")
                        split.label(text=' ' + info["warning"], icon='ERROR')

                    user_addon = USERPREF_PT_addons.is_user_addon(mod, user_addon_paths)
                    tot_row = bool(info["wiki_url"]) + bool(user_addon)

                    if tot_row:
                        split = colsub.row().split(percentage=0.15)
                        split.label(text="Internet:")
                        if info["wiki_url"]:
                            split.operator("wm.url_open", text="Documentation", icon='HELP').url = info["wiki_url"]
                        split.operator("wm.url_open", text="Report a Bug", icon='URL').url = info.get(
                            "tracker_url",
                            "https://developer.blender.org/maniphest/task/edit/form/2")
                        if user_addon:
                            split.operator("wm.addon_remove", text="Remove", icon='CANCEL').module = mod.__name__

                        # Pad so link buttons keep a consistent width.
                        for i in range(4 - tot_row):
                            split.separator()

                    # Show addon user preferences
                    if is_enabled:
                        addon_preferences = userpref.addons[module_name].preferences
                        if addon_preferences is not None:
                            draw = getattr(addon_preferences, "draw", None)
                            if draw is not None:
                                addon_preferences_class = type(addon_preferences)
                                box_prefs = col_box.box()
                                box_prefs.label("Preferences:")
                                # Temporarily give the add-on's draw() a layout
                                # to draw into, then remove it again.
                                addon_preferences_class.layout = box_prefs
                                try:
                                    draw(context)
                                except:
                                    import traceback
                                    traceback.print_exc()
                                    box_prefs.label(text="Error (see console)", icon='ERROR')
                                del addon_preferences_class.layout

        # Append missing scripts
        # First collect scripts that are used but have no script file.
        module_names = {mod.__name__ for mod, info in addons}
        missing_modules = {ext for ext in used_ext if ext not in module_names}

        if missing_modules and filter in {"All", "Enabled"}:
            col.column().separator()
            col.column().label(text="Missing script files")

            module_names = {mod.__name__ for mod, info in addons}
            for module_name in sorted(missing_modules):
                is_enabled = module_name in used_ext
                # Addon UI Code
                box = col.column().box()
                colsub = box.column()
                row = colsub.row(align=True)

                row.label(text="", icon='ERROR')

                if is_enabled:
                    row.operator("wm.addon_disable", icon='CHECKBOX_HLT', text="", emboss=False).module = module_name

                row.label(text=module_name, translate=False)
if __name__ == "__main__": # only for live edit.
bpy.utils.register_module(__name__)<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian<|fim▁hole|> println!("cargo:rustc-flags=-l irprops");
}<|fim▁end|> | // Licensed under the MIT License <LICENSE.md>
fn main() { |
<|file_name|>abort_cache_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
// Author: Spencer Kimball ([email protected])
package storage
import (
"reflect"
"testing"
"golang.org/x/net/context"
"github.com/cockroachdb/cockroach/pkg/roachpb"
"github.com/cockroachdb/cockroach/pkg/storage/engine"
"github.com/cockroachdb/cockroach/pkg/util/hlc"
"github.com/cockroachdb/cockroach/pkg/util/leaktest"
"github.com/cockroachdb/cockroach/pkg/util/stop"
"github.com/cockroachdb/cockroach/pkg/util/uuid"
)
// Shared fixtures used by the abort cache tests below.
var (
	batchR           = roachpb.BatchResponse{}
	testTxnID        *uuid.UUID // populated in init
	testTxnID2       *uuid.UUID // populated in init
	testTxnKey       = []byte("a")
	testTxnTimestamp = hlc.ZeroTimestamp.Add(123, 456)
	testTxnPriority  = int32(123)
)
// init populates the shared test fixtures: a batch response containing a
// single increment, and two fixed transaction UUIDs. Panics on a malformed
// UUID literal since that is a programming error in the test itself.
func init() {
	incR := roachpb.IncrementResponse{
		NewValue: 1,
	}
	batchR.Add(&incR)

	var err error
	testTxnID, err = uuid.FromString("0ce61c17-5eb4-4587-8c36-dcf4062ada4c")
	if err != nil {
		panic(err)
	}
	testTxnID2, err = uuid.FromString("9855a1ef-8eb9-4c06-a106-cab1dda78a2b")
	if err != nil {
		panic(err)
	}
}
// createTestAbortCache creates an in-memory engine and
// returns a abort cache using the supplied Range ID.
// The engine is registered with the stopper for cleanup.
func createTestAbortCache(
	t *testing.T, rangeID roachpb.RangeID, stopper *stop.Stopper,
) (*AbortCache, engine.Engine) {
	return NewAbortCache(rangeID), engine.NewInMem(roachpb.Attributes{}, 1<<20, stopper)
}
// TestAbortCachePutGetClearData tests basic get & put functionality as well as
// clearing the cache.
func TestAbortCachePutGetClearData(t *testing.T) {
	defer leaktest.AfterTest(t)()
	stopper := stop.NewStopper()
	defer stopper.Stop()
	sc, e := createTestAbortCache(t, 1, stopper)

	// Start with a get for an uncached id.
	entry := roachpb.AbortCacheEntry{}
	if aborted, readErr := sc.Get(context.Background(), e, testTxnID, &entry); aborted {
		t.Errorf("expected not aborted for id %s", testTxnID)
	} else if readErr != nil {
		t.Fatalf("unexpected read error: %s", readErr)
	}

	// Store an entry and verify it reads back identically.
	entry = roachpb.AbortCacheEntry{
		Key:       testTxnKey,
		Timestamp: testTxnTimestamp,
		Priority:  testTxnPriority,
	}
	if err := sc.Put(context.Background(), e, nil, testTxnID, &entry); err != nil {
		t.Errorf("unexpected error putting response: %s", err)
	}

	// tryHit asserts the aborted flag and the exact entry returned by Get.
	tryHit := func(expAbort bool, expEntry roachpb.AbortCacheEntry) {
		var actual roachpb.AbortCacheEntry
		if aborted, readErr := sc.Get(context.Background(), e, testTxnID, &actual); readErr != nil {
			t.Errorf("unexpected failure getting response: %s", readErr)
		} else if expAbort != aborted {
			t.Errorf("got aborted: %t; expected %t", aborted, expAbort)
		} else if !reflect.DeepEqual(expEntry, actual) {
			t.Fatalf("wanted %v, got %v", expEntry, actual)
		}
	}

	tryHit(true, entry)
	// After ClearData the entry must be gone.
	if err := sc.ClearData(e); err != nil {
		t.Error(err)
	}
	tryHit(false, roachpb.AbortCacheEntry{})
}
// TestAbortCacheEmptyParams tests operation with empty parameters.
// Put/Get with a nil transaction ID must fail with errEmptyTxnID.
func TestAbortCacheEmptyParams(t *testing.T) {
	defer leaktest.AfterTest(t)()
	stopper := stop.NewStopper()
	defer stopper.Stop()
	sc, e := createTestAbortCache(t, 1, stopper)

	entry := roachpb.AbortCacheEntry{
		Key:       testTxnKey,
		Timestamp: testTxnTimestamp,
		Priority:  testTxnPriority,
	}
	// Put value for test response.
	if err := sc.Put(context.Background(), e, nil, testTxnID, &entry); err != nil {
		t.Errorf("unexpected error putting response: %s", err)
	}
	if err := sc.Put(context.Background(), e, nil, nil, &entry); err != errEmptyTxnID {
		t.Errorf("expected errEmptyTxnID error putting response; got %s", err)
	}
	if _, err := sc.Get(context.Background(), e, nil, nil); err != errEmptyTxnID {
		t.Fatalf("expected errEmptyTxnID error; got %s", err)
	}
}
// TestAbortCacheCopyInto tests that entries in one cache get
// transferred correctly to another cache using CopyInto().
func TestAbortCacheCopyInto(t *testing.T) {
	defer leaktest.AfterTest(t)()
	stopper := stop.NewStopper()
	defer stopper.Stop()
	rc1, e := createTestAbortCache(t, 1, stopper)
	rc2, _ := createTestAbortCache(t, 2, stopper)

	entry := roachpb.AbortCacheEntry{
		Key:       testTxnKey,
		Timestamp: testTxnTimestamp,
		Priority:  testTxnPriority,
	}
	if err := rc1.Put(context.Background(), e, nil, testTxnID, &entry); err != nil {
		t.Errorf("unexpected error putting entry: %s", err)
	}
	// Copy the first cache into the second.
	if count, err := rc1.CopyInto(e, nil, rc2.rangeID); err != nil {
		t.Fatal(err)
	} else if expCount := 1; count != expCount {
		t.Errorf("unexpected number of copied entries: %d", count)
	}
	for _, cache := range []*AbortCache{rc1, rc2} {
		var actual roachpb.AbortCacheEntry
		// Get should return 1 for both caches.
		if aborted, readErr := cache.Get(context.Background(), e, testTxnID, &actual); !aborted || readErr != nil {
			t.Errorf("unexpected failure getting response from source: %t, %s", aborted, readErr)
		} else if !reflect.DeepEqual(entry, actual) {
			t.Fatalf("wanted %v, got %v", entry, actual)
		}
	}
}
// TestAbortCacheCopyFrom tests that entries in one cache get
// transferred correctly to another cache using CopyFrom().
func TestAbortCacheCopyFrom(t *testing.T) {
defer leaktest.AfterTest(t)()
stopper := stop.NewStopper()
defer stopper.Stop()
rc1, e := createTestAbortCache(t, 1, stopper)
rc2, _ := createTestAbortCache(t, 2, stopper)
entry := roachpb.AbortCacheEntry{
Key: testTxnKey,
Timestamp: testTxnTimestamp,
Priority: testTxnPriority,
}
if err := rc1.Put(context.Background(), e, nil, testTxnID, &entry); err != nil {
t.Errorf("unexpected error putting response: %s", err)
}
// Copy the first cache into the second.
if count, err := rc2.CopyFrom(context.Background(), e, nil, rc1.rangeID); err != nil {
t.Fatal(err)
} else if expCount := 1; count != expCount {
t.Errorf("unexpected number of copied entries: %d", count)
}
// Get should hit both caches.
for i, cache := range []*AbortCache{rc1, rc2} {
var actual roachpb.AbortCacheEntry
if aborted, readErr := cache.Get(context.Background(), e, testTxnID, &actual); !aborted || readErr != nil {
t.Fatalf("%d: unexpected read error: %t, %s", i, aborted, readErr)
} else if !reflect.DeepEqual(entry, actual) {
t.Fatalf("expected %v, got %v", entry, actual)
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for bangkok project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bangkok.settings")
<|fim▁hole|><|fim▁end|> | application = get_wsgi_application() |
<|file_name|>_match.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding, pat_is_const};
use middle::ty;
use middle::typeck::check::demand;
use middle::typeck::check::{check_expr, check_expr_has_type, FnCtxt};
use middle::typeck::check::{instantiate_path, lookup_def};
use middle::typeck::check::{structure_of, valid_range_bounds};
use middle::typeck::infer;
use middle::typeck::require_same_types;
use collections::{HashMap, HashSet};
use syntax::ast;
use syntax::ast_util;
use syntax::parse::token;
use syntax::codemap::Span;
use syntax::print::pprust;
/// Typechecks a `match` expression: the discriminant, every arm's
/// patterns, optional guards, and bodies. The expression's final type
/// is the common supertype of all arm-body types (starting from bottom
/// and moving up the lattice arm by arm).
pub fn check_match(fcx: &FnCtxt,
                   expr: &ast::Expr,
                   discrim: &ast::Expr,
                   arms: &[ast::Arm]) {
    let tcx = fcx.ccx.tcx;

    // Fresh inference variable for the discriminant; each arm's
    // patterns will constrain it below.
    let discrim_ty = fcx.infcx().next_ty_var();
    check_expr_has_type(fcx, discrim, discrim_ty);

    // Typecheck the patterns first, so that we get types for all the
    // bindings.
    for arm in arms.iter() {
        // The binding map is derived from the arm's first pattern; the
        // remaining `|` alternatives are checked against the same map.
        let mut pcx = pat_ctxt {
            fcx: fcx,
            map: pat_id_map(&tcx.def_map, *arm.pats.get(0)),
        };
        for p in arm.pats.iter() { check_pat(&mut pcx, *p, discrim_ty);}
    }

    // The result of the match is the common supertype of all the
    // arms. Start out the value as bottom, since it's the, well,
    // bottom the type lattice, and we'll be moving up the lattice as
    // we process each arm. (Note that any match with 0 arms is matching
    // on any empty type and is therefore unreachable; should the flow
    // of execution reach it, we will fail, so bottom is an appropriate
    // type in that case)
    let mut result_ty = ty::mk_bot();

    // Now typecheck the blocks.
    let mut saw_err = ty::type_is_error(discrim_ty);
    for arm in arms.iter() {
        let mut guard_err = false;
        let mut guard_bot = false;
        match arm.guard {
            Some(e) => {
                // Guards must be boolean expressions.
                check_expr_has_type(fcx, e, ty::mk_bool());
                let e_ty = fcx.expr_ty(e);
                if ty::type_is_error(e_ty) {
                    guard_err = true;
                }
                else if ty::type_is_bot(e_ty) {
                    guard_bot = true;
                }
            },
            None => ()
        }
        check_expr(fcx, arm.body);
        let bty = fcx.node_ty(arm.body.id);
        saw_err = saw_err || ty::type_is_error(bty);
        if guard_err {
            // A type error in the guard poisons the arm body too.
            fcx.write_error(arm.body.id);
            saw_err = true;
        }
        else if guard_bot {
            // A diverging guard makes the arm body unreachable.
            fcx.write_bot(arm.body.id);
        }

        // Fold this arm's body type into the running supertype.
        result_ty =
            infer::common_supertype(
                fcx.infcx(),
                infer::MatchExpression(expr.span),
                true, // result_ty is "expected" here
                result_ty,
                bty);
    }

    if saw_err {
        result_ty = ty::mk_err();
    } else if ty::type_is_bot(discrim_ty) {
        // A diverging discriminant makes the whole match diverge.
        result_ty = ty::mk_bot();
    }

    fcx.write_ty(expr.id, result_ty);
}
/// Context threaded through pattern checking: the enclosing function's
/// typechecking context plus the canonical binding map built from the
/// arm's first pattern (used to unify binding types across `|`
/// alternatives — see `check_match`).
pub struct pat_ctxt<'a> {
    // Function typechecking context: inference, node-type tables,
    // and error reporting.
    pub fcx: &'a FnCtxt<'a>,
    // Maps each bound identifier to the canonical NodeId of its first
    // occurrence.
    pub map: PatIdMap,
}
/// Typechecks an enum-variant or tuple-struct pattern such as
/// `Some(x)` or `Foo(a, b)`: resolves the path, demands the matched
/// value's type is a subtype of the pattern's, and checks each
/// subpattern against the corresponding field type. `subpats` is
/// `None` when the pattern named the variant with no parens.
pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
                         subpats: &Option<Vec<@ast::Pat>>, expected: ty::t) {

    // Typecheck the path.
    let fcx = pcx.fcx;
    let tcx = pcx.fcx.ccx.tcx;

    // Filled in by every arm of the match below: the expected type of
    // each subpattern, and a human-readable kind for error messages.
    let arg_types: Vec<ty::t> ;
    let kind_name;

    // structure_of requires type variables to be resolved.
    // So when we pass in <expected>, it's an error if it
    // contains type variables.

    // Check to see whether this is an enum or a struct.
    match *structure_of(pcx.fcx, pat.span, expected) {
        ty::ty_enum(_, ref expected_substs) => {
            // Lookup the enum and variant def ids:
            let v_def = lookup_def(pcx.fcx, pat.span, pat.id);
            match ast_util::variant_def_ids(v_def) {
                Some((enm, var)) => {
                    // Assign the pattern the type of the *enum*, not the variant.
                    let enum_tpt = ty::lookup_item_type(tcx, enm);
                    instantiate_path(pcx.fcx,
                                     path,
                                     enum_tpt,
                                     v_def,
                                     pat.span,
                                     pat.id);

                    // check that the type of the value being matched is a subtype
                    // of the type of the pattern:
                    let pat_ty = fcx.node_ty(pat.id);
                    demand::subtype(fcx, pat.span, expected, pat_ty);

                    // Get the expected types of the arguments.
                    arg_types = {
                        let vinfo =
                            ty::enum_variant_with_id(tcx, enm, var);
                        let var_tpt = ty::lookup_item_type(tcx, var);
                        // Only substitute when the type-parameter
                        // arities agree; a mismatch means an error was
                        // already reported elsewhere.
                        vinfo.args.iter().map(|t| {
                            if var_tpt.generics.type_param_defs().len() ==
                                expected_substs.tps.len()
                            {
                                ty::subst(tcx, expected_substs, *t)
                            }
                            else {
                                *t // In this case, an error was already signaled
                                   // anyway
                            }
                        }).collect()
                    };

                    kind_name = "variant";
                }
                None => {
                    // The path did not resolve to a variant: report a
                    // mismatch and give every subpattern the error type.
                    // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
                    fcx.infcx().type_error_message_str_with_expected(pat.span,
                        |expected, actual| {
                            expected.map_or("".to_owned(), |e| {
                                format!("mismatched types: expected `{}` but found {}",
                                        e, actual)})},
                        Some(expected), "a structure pattern".to_owned(),
                        None);
                    fcx.write_error(pat.id);
                    kind_name = "[error]";
                    arg_types = subpats.clone()
                                       .unwrap_or_default()
                                       .move_iter()
                                       .map(|_| ty::mk_err())
                                       .collect();
                }
            }
        }
        ty::ty_struct(struct_def_id, ref expected_substs) => {
            // Lookup the struct ctor def id
            let s_def = lookup_def(pcx.fcx, pat.span, pat.id);
            let s_def_id = ast_util::def_id_of_def(s_def);

            // Assign the pattern the type of the struct.
            let ctor_tpt = ty::lookup_item_type(tcx, s_def_id);
            // A tuple-struct ctor has a function type; the struct type
            // itself is the ctor's return type.
            let struct_tpt = if ty::is_fn_ty(ctor_tpt.ty) {
                ty::ty_param_bounds_and_ty {ty: ty::ty_fn_ret(ctor_tpt.ty),
                                            ..ctor_tpt}
            } else {
                ctor_tpt
            };
            instantiate_path(pcx.fcx,
                             path,
                             struct_tpt,
                             s_def,
                             pat.span,
                             pat.id);

            // Check that the type of the value being matched is a subtype of
            // the type of the pattern.
            let pat_ty = fcx.node_ty(pat.id);
            demand::subtype(fcx, pat.span, expected, pat_ty);

            // Get the expected types of the arguments.
            let class_fields = ty::struct_fields(
                tcx, struct_def_id, expected_substs);
            arg_types = class_fields.iter().map(|field| field.mt.ty).collect();

            kind_name = "structure";
        }
        _ => {
            // Expected type is neither enum nor struct: mismatch.
            // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
            fcx.infcx().type_error_message_str_with_expected(pat.span,
                |expected, actual| {
                    expected.map_or("".to_owned(), |e| {
                        format!("mismatched types: expected `{}` but found {}",
                                e, actual)})},
                Some(expected), "an enum or structure pattern".to_owned(),
                None);
            fcx.write_error(pat.id);
            kind_name = "[error]";
            arg_types = subpats.clone()
                               .unwrap_or_default()
                               .iter()
                               .map(|_| ty::mk_err())
                               .collect();
        }
    }

    let arg_len = arg_types.len();

    // Count the number of subpatterns.
    let subpats_len;
    match *subpats {
        // No parenthesized subpattern list: treat as matching arity.
        None => subpats_len = arg_len,
        Some(ref subpats) => subpats_len = subpats.len()
    }

    let mut error_happened = false;

    if arg_len > 0 {
        // N-ary variant.
        if arg_len != subpats_len {
            // Arity mismatch between pattern and definition.
            let s = format!("this pattern has \
                             {npat, plural, =1{# field} other{# fields}}, \
                             but the corresponding {kind} has \
                             {narg, plural, =1{# field} other{# fields}}",
                            npat = subpats_len,
                            kind = kind_name,
                            narg = arg_len);
            tcx.sess.span_err(pat.span, s);
            error_happened = true;
        }

        if !error_happened {
            // Pairwise-check each subpattern against its field type.
            for pats in subpats.iter() {
                for (subpat, arg_ty) in pats.iter().zip(arg_types.iter()) {
                    check_pat(pcx, *subpat, *arg_ty);
                }
            }
        }
    } else if subpats_len > 0 {
        // Subpatterns supplied for a nullary variant/struct.
        tcx.sess.span_err(pat.span,
                          format!("this pattern has \
                                   {npat, plural, =1{# field} other{# fields}}, \
                                   but the corresponding {kind} has no fields",
                                  npat = subpats_len,
                                  kind = kind_name));
        error_happened = true;
    }

    if error_happened {
        // Still check subpatterns (with the error type) so later type
        // lookups for them don't fail.
        for pats in subpats.iter() {
            for pat in pats.iter() {
                check_pat(pcx, *pat, ty::mk_err());
            }
        }
    }
}
/// Typechecks the fields of a struct (or struct-like enum variant)
/// pattern: flags fields bound twice, fields that don't exist on the
/// struct, and (unless `..` was written) fields the pattern omits.
///
/// `path` is the AST path item naming the type of this struct.
/// `fields` is the field patterns of the struct pattern.
/// `class_fields` describes the type of each field of the struct.
/// `class_id` is the ID of the struct.
/// `substitutions` are the type substitutions applied to this struct type
/// (e.g. K,V in HashMap<K,V>).
/// `etc` is true if the pattern said '...' and false otherwise.
pub fn check_struct_pat_fields(pcx: &pat_ctxt,
                               span: Span,
                               path: &ast::Path,
                               fields: &[ast::FieldPat],
                               class_fields: Vec<ty::field_ty> ,
                               class_id: ast::DefId,
                               substitutions: &ty::substs,
                               etc: bool) {
    let tcx = pcx.fcx.ccx.tcx;

    // Index the class fields. The second argument in the tuple is whether the
    // field has been bound yet or not.
    let mut field_map = HashMap::new();
    for (i, class_field) in class_fields.iter().enumerate() {
        field_map.insert(class_field.name, (i, false));
    }

    // Typecheck each field.
    let mut found_fields = HashSet::new();
    for field in fields.iter() {
        match field_map.find_mut(&field.ident.name) {
            Some(&(_, true)) => {
                // Same field named twice in one pattern.
                tcx.sess.span_err(span,
                    format!("field `{}` bound twice in pattern",
                            token::get_ident(field.ident)));
            }
            Some(&(index, ref mut used)) => {
                // First binding of this field: mark it used and check
                // the subpattern against the (substituted) field type.
                *used = true;
                let class_field = *class_fields.get(index);
                let field_type = ty::lookup_field_type(tcx,
                                                       class_id,
                                                       class_field.id,
                                                       substitutions);
                check_pat(pcx, field.pat, field_type);
                found_fields.insert(index);
            }
            None => {
                let name = pprust::path_to_str(path);
                // Check the pattern anyway, so that attempts to look
                // up its type won't fail
                check_pat(pcx, field.pat, ty::mk_err());
                tcx.sess.span_err(span,
                    format!("struct `{}` does not have a field named `{}`",
                            name,
                            token::get_ident(field.ident)));
            }
        }
    }

    // Report an error if not all the fields were specified.
    if !etc {
        for (i, field) in class_fields.iter().enumerate() {
            if found_fields.contains(&i) {
                continue;
            }

            tcx.sess.span_err(span,
                              format!("pattern does not mention field `{}`",
                                      token::get_name(field.name)));
        }
    }
}
/// Typechecks a struct pattern `Foo { a, b, .. }` against an expected
/// struct type: verifies the pattern's path resolved to the expected
/// struct (reporting a mismatch otherwise), then delegates per-field
/// checking to `check_struct_pat_fields`.
pub fn check_struct_pat(pcx: &pat_ctxt, pat_id: ast::NodeId, span: Span,
                        expected: ty::t, path: &ast::Path,
                        fields: &[ast::FieldPat], etc: bool,
                        struct_id: ast::DefId,
                        substitutions: &ty::substs) {
    let fcx = pcx.fcx;
    let tcx = pcx.fcx.ccx.tcx;

    let class_fields = ty::lookup_struct_fields(tcx, struct_id);

    // Check to ensure that the struct is the one specified.
    match tcx.def_map.borrow().find(&pat_id) {
        Some(&ast::DefStruct(supplied_def_id))
                if supplied_def_id == struct_id => {
            // OK.
        }
        Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
            // Path resolved to a different struct or to an enum variant.
            let name = pprust::path_to_str(path);
            tcx.sess.span_err(span,
                              format!("mismatched types: expected `{}` but found `{}`",
                                      fcx.infcx().ty_to_str(expected),
                                      name));
        }
        _ => {
            // Resolution should always have recorded a def for this pattern.
            tcx.sess.span_bug(span, "resolve didn't write in struct ID");
        }
    }

    // Fields are checked even after a mismatch so their types are recorded.
    check_struct_pat_fields(pcx, span, path, fields, class_fields, struct_id,
                            substitutions, etc);
}
/// Typechecks a struct-like enum variant pattern
/// (`Enum::Variant { a, b }`) against the expected enum type: finds
/// the variant the path resolved to, then checks the field patterns
/// against that variant's fields.
pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
                                          pat_id: ast::NodeId,
                                          span: Span,
                                          expected: ty::t,
                                          path: &ast::Path,
                                          fields: &[ast::FieldPat],
                                          etc: bool,
                                          enum_id: ast::DefId,
                                          substitutions: &ty::substs) {
    let fcx = pcx.fcx;
    let tcx = pcx.fcx.ccx.tcx;

    // Find the variant that was specified.
    match tcx.def_map.borrow().find(&pat_id) {
        Some(&ast::DefVariant(found_enum_id, variant_id, _))
                if found_enum_id == enum_id => {
            // Get the struct fields from this struct-like enum variant.
            let class_fields = ty::lookup_struct_fields(tcx, variant_id);

            check_struct_pat_fields(pcx, span, path, fields, class_fields,
                                    variant_id, substitutions, etc);
        }
        Some(&ast::DefStruct(..)) | Some(&ast::DefVariant(..)) => {
            // Path resolved to a struct or a variant of some other enum.
            let name = pprust::path_to_str(path);
            tcx.sess.span_err(span,
                              format!("mismatched types: expected `{}` but \
                                       found `{}`",
                                      fcx.infcx().ty_to_str(expected),
                                      name));
        }
        _ => {
            // Resolution should always have recorded a def for this pattern.
            tcx.sess.span_bug(span, "resolve didn't write in variant");
        }
    }
}
// Pattern checking is top-down rather than bottom-up so that bindings get
// their types immediately.
/// Typechecks one pattern against the `expected` type, recording a
/// type for `pat.id` (the expected type, a literal/binding type, or
/// the error type) and recursing into subpatterns.
pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
    let fcx = pcx.fcx;
    let tcx = pcx.fcx.ccx.tcx;

    match pat.node {
        // `_` and `..` match anything and take the expected type.
        ast::PatWild | ast::PatWildMulti => {
            fcx.write_ty(pat.id, expected);
        }
        // Literal pattern: the literal expression must have the
        // expected type; the pattern gets the literal's type.
        ast::PatLit(lt) => {
            check_expr_has_type(fcx, lt, expected);
            fcx.write_ty(pat.id, fcx.expr_ty(lt));
        }
        // Range pattern `a..b`: both bounds must agree with the
        // expected type, with each other, and be numeric or char;
        // the lower bound must be less than the upper.
        ast::PatRange(begin, end) => {
            check_expr_has_type(fcx, begin, expected);
            check_expr_has_type(fcx, end, expected);
            let b_ty =
                fcx.infcx().resolve_type_vars_if_possible(fcx.expr_ty(begin));
            let e_ty =
                fcx.infcx().resolve_type_vars_if_possible(fcx.expr_ty(end));
            debug!("pat_range beginning type: {:?}", b_ty);
            debug!("pat_range ending type: {:?}", e_ty);
            if !require_same_types(
                tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty,
                || "mismatched types in range".to_owned())
            {
                // no-op
            } else if !ty::type_is_numeric(b_ty) && !ty::type_is_char(b_ty) {
                tcx.sess.span_err(pat.span, "non-numeric type used in range");
            } else {
                match valid_range_bounds(fcx.ccx, begin, end) {
                    Some(false) => {
                        tcx.sess.span_err(begin.span,
                            "lower range bound must be less than upper");
                    },
                    None => {
                        // Bounds could not be evaluated/compared.
                        tcx.sess.span_err(begin.span,
                            "mismatched types in range");
                    },
                    _ => { },
                }
            }
            fcx.write_ty(pat.id, b_ty);
        }
        // A path that resolves to a constant: the matched value must
        // be a supertype of the constant's type.
        ast::PatEnum(..) |
        ast::PatIdent(..) if pat_is_const(&tcx.def_map, pat) => {
            let const_did = ast_util::def_id_of_def(tcx.def_map.borrow()
                                                        .get_copy(&pat.id));
            let const_tpt = ty::lookup_item_type(tcx, const_did);
            demand::suptype(fcx, pat.span, expected, const_tpt.ty);
            fcx.write_ty(pat.id, const_tpt.ty);
        }
        // An identifier that introduces a binding (possibly `ref`/`mut`,
        // possibly with a subpattern `name @ pat`).
        ast::PatIdent(bm, ref name, sub) if pat_is_binding(&tcx.def_map, pat) => {
            let typ = fcx.local_ty(pat.span, pat.id);

            match bm {
                ast::BindByRef(mutbl) => {
                    // if the binding is like
                    //    ref x | ref const x | ref mut x
                    // then the type of x is &M T where M is the mutability
                    // and T is the expected type
                    let region_var =
                        fcx.infcx().next_region_var(
                            infer::PatternRegion(pat.span));
                    let mt = ty::mt {ty: expected, mutbl: mutbl};
                    let region_ty = ty::mk_rptr(tcx, region_var, mt);
                    demand::eqtype(fcx, pat.span, region_ty, typ);
                }
                // otherwise the type of x is the expected type T
                ast::BindByValue(_) => {
                    demand::eqtype(fcx, pat.span, expected, typ);
                }
            }

            // Every `|` alternative must bind this name at the same
            // type as the canonical (first-pattern) binding.
            let canon_id = *pcx.map.get(&ast_util::path_to_ident(name));
            if canon_id != pat.id {
                let ct = fcx.local_ty(pat.span, canon_id);
                demand::eqtype(fcx, pat.span, ct, typ);
            }
            fcx.write_ty(pat.id, typ);
            debug!("(checking match) writing type for pat id {}", pat.id);

            // `name @ subpattern`: the subpattern sees the same
            // expected type.
            match sub {
                Some(p) => check_pat(pcx, p, expected),
                _ => ()
            }
        }
        // A non-binding identifier path: a nullary enum variant.
        ast::PatIdent(_, ref path, _) => {
            check_pat_variant(pcx, pat, path, &Some(Vec::new()), expected);
        }
        // Tuple-struct / enum-variant pattern with subpatterns.
        ast::PatEnum(ref path, ref subpats) => {
            check_pat_variant(pcx, pat, path, subpats, expected);
        }
        ast::PatStruct(ref path, ref fields, etc) => {
            // Grab the class data that we care about.
            let structure = structure_of(fcx, pat.span, expected);
            let mut error_happened = false;
            match *structure {
                ty::ty_struct(cid, ref substs) => {
                    check_struct_pat(pcx, pat.id, pat.span, expected, path,
                                     fields.as_slice(), etc, cid, substs);
                }
                ty::ty_enum(eid, ref substs) => {
                    // Struct-like enum variant.
                    check_struct_like_enum_variant_pat(pcx,
                                                       pat.id,
                                                       pat.span,
                                                       expected,
                                                       path,
                                                       fields.as_slice(),
                                                       etc,
                                                       eid,
                                                       substs);
                }
                _ => {
                    // Expected type is neither struct nor enum.
                    // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
                    fcx.infcx().type_error_message_str_with_expected(pat.span,
                        |expected, actual| {
                            expected.map_or("".to_owned(), |e| {
                                format!("mismatched types: expected `{}` but found {}",
                                        e, actual)})},
                        Some(expected), "a structure pattern".to_owned(),
                        None);
                    // If the path itself resolved to a struct, still
                    // check the fields (with empty substitutions) so
                    // their types are recorded.
                    match tcx.def_map.borrow().find(&pat.id) {
                        Some(&ast::DefStruct(supplied_def_id)) => {
                            check_struct_pat(pcx,
                                             pat.id,
                                             pat.span,
                                             ty::mk_err(),
                                             path,
                                             fields.as_slice(),
                                             etc,
                                             supplied_def_id,
                                             &ty::substs {
                                                 self_ty: None,
                                                 tps: Vec::new(),
                                                 regions: ty::ErasedRegions,
                                             });
                        }
                        _ => () // Error, but we're already in an error case
                    }
                    error_happened = true;
                }
            }

            // Finally, write in the type.
            if error_happened {
                fcx.write_error(pat.id);
            } else {
                fcx.write_ty(pat.id, expected);
            }
        }
        // Tuple pattern: arity must match; each element is checked
        // against the corresponding tuple element type.
        ast::PatTup(ref elts) => {
            let s = structure_of(fcx, pat.span, expected);
            let e_count = elts.len();
            match *s {
                ty::ty_tup(ref ex_elts) if e_count == ex_elts.len() => {
                    for (i, elt) in elts.iter().enumerate() {
                        check_pat(pcx, *elt, *ex_elts.get(i));
                    }
                    fcx.write_ty(pat.id, expected);
                }
                _ => {
                    // Mismatch: check elements with the error type so
                    // later lookups succeed, then report.
                    for elt in elts.iter() {
                        check_pat(pcx, *elt, ty::mk_err());
                    }
                    // use terr_tuple_size if both types are tuples
                    let type_error = match *s {
                        ty::ty_tup(ref ex_elts) =>
                            ty::terr_tuple_size(ty::expected_found{expected: ex_elts.len(),
                                                                   found: e_count}),
                        _ => ty::terr_mismatch
                    };
                    // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
                    fcx.infcx().type_error_message_str_with_expected(pat.span,
                        |expected, actual| {
                            expected.map_or("".to_owned(), |e| {
                                format!("mismatched types: expected `{}` but found {}",
                                        e, actual)})},
                        Some(expected), "tuple".to_owned(), Some(&type_error));
                    fcx.write_error(pat.id);
                }
            }
        }
        // Owned-box pattern `~pat`.
        ast::PatUniq(inner) => {
            check_pointer_pat(pcx, Send, inner, pat.id, pat.span, expected);
        }
        // Reference pattern `&pat`.
        ast::PatRegion(inner) => {
            check_pointer_pat(pcx, Borrowed, inner, pat.id, pat.span, expected);
        }
        // Vector pattern `[a, .., b]` with optional slice subpattern.
        ast::PatVec(ref before, slice, ref after) => {
            let default_region_var =
                fcx.infcx().next_region_var(
                    infer::PatternRegion(pat.span));

            // On mismatch: check every subpattern with the error type
            // (so their types exist), then report and bail out.
            let check_err = || {
                for &elt in before.iter() {
                    check_pat(pcx, elt, ty::mk_err());
                }
                for &elt in slice.iter() {
                    check_pat(pcx, elt, ty::mk_err());
                }
                for &elt in after.iter() {
                    check_pat(pcx, elt, ty::mk_err());
                }
                // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
                fcx.infcx().type_error_message_str_with_expected(
                    pat.span,
                    |expected, actual| {
                        expected.map_or("".to_owned(), |e| {
                            format!("mismatched types: expected `{}` but found {}",
                                    e, actual)})},
                    Some(expected),
                    "a vector pattern".to_owned(),
                    None);
                fcx.write_error(pat.id);
            };

            // Determine the element type, region, and mutability from
            // the expected type's shape.
            let (elt_type, region_var, mutbl) = match *structure_of(fcx,
                                                                    pat.span,
                                                                    expected) {
                ty::ty_vec(mt, Some(_)) => (mt.ty, default_region_var, ast::MutImmutable),
                ty::ty_uniq(t) => match ty::get(t).sty {
                    ty::ty_vec(mt, None) => {
                        // `~[T]` patterns are rejected but still
                        // checked so subpattern types get recorded.
                        fcx.type_error_message(pat.span,
                                               |_| {
                                                   ~"unique vector patterns are no \
                                                     longer supported"
                                               },
                                               expected,
                                               None);
                        (mt.ty, default_region_var, ast::MutImmutable)
                    }
                    _ => {
                        check_err();
                        return;
                    }
                },
                ty::ty_rptr(r, mt) => match ty::get(mt.ty).sty {
                    ty::ty_vec(mt, None) => (mt.ty, r, mt.mutbl),
                    _ => {
                        check_err();
                        return;
                    }
                },
                _ => {
                    check_err();
                    return;
                }
            };
            for elt in before.iter() {
                check_pat(pcx, *elt, elt_type);
            }
            // The slice subpattern (if any) matches a borrowed slice
            // of the remaining elements.
            match slice {
                Some(slice_pat) => {
                    let slice_ty = ty::mk_slice(tcx,
                                                region_var,
                                                ty::mt {ty: elt_type, mutbl: mutbl});
                    check_pat(pcx, slice_pat, slice_ty);
                }
                None => ()
            }
            for elt in after.iter() {
                check_pat(pcx, *elt, elt_type);
            }
            fcx.write_ty(pat.id, expected);
        }
    }
}
// Helper function to check @, ~ and & patterns
/// Typechecks a pointer pattern: the expected type must be the
/// matching pointer kind (`~T` for `Send`, `&T` for `Borrowed`);
/// the inner pattern is then checked against the pointee type.
/// Any other expected type is reported as a mismatch.
pub fn check_pointer_pat(pcx: &pat_ctxt,
                         pointer_kind: PointerKind,
                         inner: &ast::Pat,
                         pat_id: ast::NodeId,
                         span: Span,
                         expected: ty::t) {
    let fcx = pcx.fcx;
    // On success: check the inner pattern against the pointee type and
    // give the whole pattern the expected (pointer) type.
    let check_inner: |ty::t| = |e_inner| {
        check_pat(pcx, inner, e_inner);
        fcx.write_ty(pat_id, expected);
    };
    match *structure_of(fcx, span, expected) {
        ty::ty_uniq(e_inner) if pointer_kind == Send => {
            check_inner(e_inner);
        }
        ty::ty_rptr(_, e_inner) if pointer_kind == Borrowed => {
            check_inner(e_inner.ty);
        }
        _ => {
            // Mismatch: still check the inner pattern (with the error
            // type) so its type is recorded, then report.
            check_pat(pcx, inner, ty::mk_err());
            // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
            fcx.infcx().type_error_message_str_with_expected(
                span,
                |expected, actual| {
                    expected.map_or("".to_owned(), |e| {
                        format!("mismatched types: expected `{}` but found {}",
                                e, actual)})},
                Some(expected),
                format!("{} pattern", match pointer_kind {
                    Send => "a `~`-box",
                    Borrowed => "an `&`-pointer"
                }),
                None);
            fcx.write_error(pat_id);
        }
    }
}
/// Which pointer sigil a pointer pattern used: `Send` for an owned
/// box (`~pat`), `Borrowed` for a reference (`&pat`). Consumed by
/// `check_pointer_pat` to pick the matching expected-type shape.
#[deriving(Eq)]
pub enum PointerKind { Send, Borrowed }
<|file_name|>old_test.rs<|end_file_name|><|fim▁begin|>use wtools::str;
// //
// #[test]
// #[should_panic]
// fn split_trivial()
// {
// let mut opts = str::split::default();
// opts.delimeter( vec![ "" ] );
// let got = str::split( &opts );
// let exp : Vec<&str> = vec![];
// assert_eq!( got, exp );
// }
// //
// #[test]
// fn split_fast_preserving_empty1_preserving_delimenter1()
// {
// let mut opts = str::split_fast::default() ;
// opts.delimeter( vec![ "" ] );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "abc" ) );
// opts.delimeter( vec![ "" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "", "b", "", "c" ] );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "" ) );
// opts.delimeter( vec![ "a" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", " ", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", " ", "", " ", "", " ", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.delimeter( vec![ "c" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b a b" ) );
// opts.delimeter( vec![ "a" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "a", " b ", "a", " b" ] );
// /* */
// let mut opts = str::split_fast::default();
// opts.src( String::from( ".a" ) );
// opts.delimeter( vec![ ".", "#" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", ".", "a" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a." ) );
// opts.delimeter( vec![ ".", "#" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", ".", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<! <<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<", " " ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa", " ", "", "<<!", "", " ", "", "<<-", "", " ", "Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa ", "<<<-", " Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "." ) );
// opts.delimeter( vec![ "." ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", ".", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "..." ) );
// opts.delimeter( vec![ "." ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", ".", "", ".", "", ".", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "\"a b\" x \"\" c" ) );
// opts.delimeter( vec![ "a b", " ", " c", "\"", "" ] );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "\"", "", "a b", "", "\"", "", " ", "x", "", "", " ", "", "\"", "", "\"", "", " ", "c" ] );
// }
// //
// #[test]
// fn split_fast_preserving_empty1_preserving_delimenter0()
// {
// let mut opts = str::split_fast::default() ;<|fim▁hole|>// opts.delimeter( vec![ "" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![ "", "" ];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "abc" ) );
// opts.delimeter( vec![ "" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "a", "b", "c", "" ] );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "", "", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.delimeter( vec![ "c" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b a b" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", " b ", " b" ] );
// /* */
// let mut opts = str::split_fast::default();
// opts.src( String::from( ".a" ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "a" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a." ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<! <<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<", " " ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa", "", "", "", "", "Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa ", " Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "..." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "", "", "" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "\"a b\" x \"\" c" ) );
// opts.delimeter( vec![ "a b", " ", " c", "\"", "" ] );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "", "", "", "", "x", "", "", "", "", "c" ] );
// }
// //
// #[test]
// fn split_fast_preserving_empty0_preserving_delimenter1()
// {
// let mut opts = str::split_fast::default() ;
// opts.delimeter( vec![ "" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "abc" ) );
// opts.delimeter( vec![ "" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "b", "c" ] );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", " ", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", " ", " ", " ", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.delimeter( vec![ "c" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b a b" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", " b ", "a", " b" ] );
// /* */
// let mut opts = str::split_fast::default();
// opts.src( String::from( ".a" ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ ".", "a" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a." ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "." ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<! <<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<", " " ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa", " ", "<<!", " ", "<<-", " ", "Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa ", "<<<-", " Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "." ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "..." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ ".", ".", "." ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "\"a b\" x \"\" c" ) );
// opts.delimeter( vec![ "a b", " ", " c", "\"", "" ] );
// opts.preserving_empty( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "\"", "a b", "\"", " ", "x", " ", "\"", "\"", " ", "c" ] );
// }
// //
// #[test]
// fn split_fast_preserving_empty0_preserving_delimenter0()
// {
// let mut opts = str::split_fast::default() ;
// opts.delimeter( vec![ "" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "abc" ) );
// opts.delimeter( vec![ "" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "b", "c" ] );
// let mut opts = str::split_fast::default() ;
// opts.src( String::from( "" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a", "b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b" ) );
// opts.delimeter( vec![ "c" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a b" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a b a b" ) );
// opts.delimeter( vec![ "a" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ " b ", " b" ] );
// /* */
// let mut opts = str::split_fast::default();
// opts.src( String::from( ".a" ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "a." ) );
// opts.delimeter( vec![ ".", "#" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "a" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<! <<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<", " " ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa", "Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "Aa <<<- Bb" ) );
// opts.delimeter( vec![ "->>>", "<<<-", "->>", "<<-", "!>>", "<<!", ">>", "<<" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "Aa ", " Bb" ] );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "..." ) );
// opts.delimeter( vec![ "." ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// let exp: Vec<&str> = vec![];
// assert_eq!( got, exp );
// let mut opts = str::split_fast::default();
// opts.src( String::from( "\"a b\" x \"\" c" ) );
// opts.delimeter( vec![ "a b", " ", " c", "\"", "" ] );
// opts.preserving_empty( false );
// opts.preserving_delimeters( false );
// let got = str::split_fast( &opts );
// assert_eq!( got, vec![ "x", "c" ] );
// }
// //
// /*
// split_trivial
// split_fast_preserving_empty1_preserving_delimenter1
// split_fast_preserving_empty1_preserving_delimenter0
// split_fast_preserving_empty0_preserving_delimenter1
// split_fast_preserving_empty0_preserving_delimenter0
// */<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from django import test
from common import api
from common import util
from common import validate
from common.test import base
class CommonViewTest(base.ViewTestCase):<|fim▁hole|> r = self.login_and_get('popular', '/user/popular/overview/')
redirected = self.assertRedirectsPrefix(r, '/user/popular/overview')
self.assertTemplateUsed(redirected, 'actor/templates/overview.html')
def test_confirm(self):
nonce = util.create_nonce('popular', 'entry_remove')
entry = 'stream/popular%40example.com/presence/12345'
path = '/user/popular/overview'
r = self.login_and_get('popular', path, {'entry_remove': entry,
'_nonce': nonce})
r = self.assertRedirectsPrefix(r, '/confirm')
self.assertContains(r, nonce)
self.assertContains(r, entry)
self.assertContains(r, path)
class UtilTestCase(test.TestCase):
def test_get_user_from_topic(self):
topics = [('[email protected]', 'inbox/[email protected]/presence'),
('[email protected]', 'inbox/[email protected]/overview'),
('[email protected]', 'stream/[email protected]/presence/12345'),
(None, 'stream//presence'),
(None, 'stream/something/else'),
('[email protected]', 'crazy/[email protected]/dddfff$$%%///'),
('asdad@asdasd@asdasd', 'multi/asdad@asdasd@asdasd/cllad/asdff')]
for t in topics:
self.assertEqual(util.get_user_from_topic(t[1]), t[0], t[1])
# We're going to import the rest of the test cases into the local
# namespace so that we can run them as
# python manage.py test common.WhateverTest
from common.test.api import *
from common.test.clean import *
from common.test.db import *
from common.test.domain import *
from common.test.monitor import *
from common.test.notification import *
from common.test.patterns import *
from common.test.queue import *
from common.test.sms import *
from common.test.throttle import *
from common.test.validate import *
from common.templatetags.test.avatar import *
from common.templatetags.test.format import *
from common.templatetags.test.presence import *
# This is for legacy compat with older tests
# TODO(termie): remove me when no longer needed
from common.test.base import *
from common.test.util import *<|fim▁end|> | def test_redirect_slash(self): |
<|file_name|>migrate.py<|end_file_name|><|fim▁begin|>from venv import _venv
from fabric.api import task
@task
def migrate():
"""<|fim▁hole|> Run Django's migrate command
"""
_venv("python manage.py migrate")
@task
def syncdb():
"""
Run Django's syncdb command
"""
_venv("python manage.py syncdb")<|fim▁end|> | |
<|file_name|>tabs_with_multiselect.py<|end_file_name|><|fim▁begin|>from bokeh.io import save
from bokeh.models import MultiSelect, Tabs
select = MultiSelect(options=["First option", "Second option"])
tabs = Tabs(tabs=[("A tab", select)], width=300)<|fim▁hole|><|fim▁end|> |
save(tabs) |
<|file_name|>web.timers.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var $ = require('./$'),
$def = require('./$.def'),
invoke = require('./$.invoke'),
partial = require('./$.partial'),
navigator = $.g.navigator,
MSIE = !!navigator && /MSIE .\./.test(navigator.userAgent);
function wrap(set) {
return MSIE ? function(fn, time) {
return set(invoke(partial, [].slice.call(arguments, 2), $.isFunction(fn) ? fn : Function(fn)), time);
} : set;
}
$def($def.G + $def.B + $def.F * MSIE, {
setTimeout: wrap($.g.setTimeout),
setInterval: wrap($.g.setInterval)
});<|fim▁end|> | /* */ |
<|file_name|>1.4.1.py<|end_file_name|><|fim▁begin|>#coding:utf-8
'''
第一种方式:使用os模块中的fork方式实现多进程
import os
if __name__ == '__main__':
print 'current Process (%s) start ...'%(os.getpid())
pid = os.fork()
if pid < 0:
print 'error in fork'
elif pid == 0:
print 'I am child process(%s) and my parent process is (%s)',(os.getpid(),os.getppid())
else:
print 'I(%s) created a chlid process (%s).',(os.getpid(),pid)
'''
'''
第二种方法:使用multiprocessing模块创建多进程
import os
from multiprocessing import Process
# 子进程要执行的代码
def run_proc(name):
print 'Child process %s (%s) Running...' % (name, os.getpid())
if __name__ == '__main__':
print 'Parent process %s.' % os.getpid()
p_list=[]
for i in range(5):
p = Process(target=run_proc, args=(str(i),))
p_list.append(p)
print 'Process will start.'
p_list[i].start()
for p in p_list:
p.join()
print 'Process end.'
'''
'''
multiprocessing模块提供了一个Pool类来代表进程池对象
from multiprocessing import Pool
import os, time, random
def run_task(name):
print 'Task %s (pid = %s) is running...' % (name, os.getpid())
time.sleep(random.random() * 3)
print 'Task %s end.' % name
if __name__=='__main__':
print 'Current process %s.' % os.getpid()
p = Pool(processes=3)
for i in range(5):
p.apply_async(run_task, args=(i,))
print 'Waiting for all subprocesses done...'
p.close()
p.join()
print 'All subprocesses done.'
'''
'''
Queue进程间通信
from multiprocessing import Process, Queue
import os, time, random
# 写数据进程执行的代码:
def proc_write(q,urls):
print('Process(%s) is writing...' % os.getpid())
for url in urls:
q.put(url)
print('Put %s to queue...' % url)
time.sleep(random.random())
# 读数据进程执行的代码:
def proc_read(q):
print('Process(%s) is reading...' % os.getpid())
while True:
url = q.get(True)
print('Get %s from queue.' % url)
if __name__=='__main__':
# 父进程创建Queue,并传给各个子进程:<|fim▁hole|> proc_writer1 = Process(target=proc_write, args=(q,['url_1', 'url_2', 'url_3']))
proc_writer2 = Process(target=proc_write, args=(q,['url_4','url_5','url_6']))
proc_reader = Process(target=proc_read, args=(q,))
# 启动子进程proc_writer,写入:
proc_writer1.start()
proc_writer2.start()
# 启动子进程proc_reader,读取:
proc_reader.start()
# 等待proc_writer结束:
proc_writer1.join()
proc_writer2.join()
# proc_reader进程里是死循环,无法等待其结束,只能强行终止:
proc_reader.terminate()
'''
'''
pipe进程间通信
import multiprocessing
import random
import time,os
def proc_send(pipe,urls):
for url in urls:
print "Process(%s) send: %s" %(os.getpid(),url)
pipe.send(url)
time.sleep(random.random())
def proc_recv(pipe):
while True:
print "Process(%s) rev:%s" %(os.getpid(),pipe.recv())
time.sleep(random.random())
'''<|fim▁end|> | q = Queue() |
<|file_name|>0018_merge.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
<|fim▁hole|> ('clients', '0015_auto_20151025_1209'),
]
operations = [
]<|fim▁end|> | class Migration(migrations.Migration):
dependencies = [
('clients', '0017_auto_20151025_1240'), |
<|file_name|>left_encoder.py<|end_file_name|><|fim▁begin|>import pigpio<|fim▁hole|>class LeftEncoder:
def __init__(self, pin=24):
self.pi = pigpio.pi()
self.pin = pin
self.pi.set_mode(pin, pigpio.INPUT)
self.pi.set_pull_up_down(pin, pigpio.PUD_UP)
cb1 = self.pi.callback(pin, pigpio.EITHER_EDGE, self.cbf)
self.tick = 0
def cbf(self, gpio, level, tick):
# print(gpio, level, tick)
print(self.tick)
self.tick += 1
e = LeftEncoder()
while True:
time.sleep(.01)<|fim▁end|> | import time
|
<|file_name|>expr_method.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(never_type)]
#![allow(unused_variables)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![deny(unreachable_code)]
struct Foo;
impl Foo {
fn foo(&self, x: !, y: usize) { }
fn bar(&self, x: !) { }
}
fn a() {
// the `22` is unreachable:
Foo.foo(return, 22); //~ ERROR unreachable
}
fn b() {
// the call is unreachable:
Foo.bar(return); //~ ERROR unreachable
}
<|fim▁hole|><|fim▁end|> | fn main() { } |
<|file_name|>option.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Optional values.
//!
//! Type [`Option`] represents an optional value: every [`Option`]
//! is either [`Some`] and contains a value, or [`None`], and
//! does not. [`Option`] types are very common in Rust code, as
//! they have a number of uses:
//!
//! * Initial values
//! * Return values for functions that are not defined
//! over their entire input range (partial functions)
//! * Return value for otherwise reporting simple errors, where `None` is
//! returned on error
//! * Optional struct fields
//! * Struct fields that can be loaned or "taken"
//! * Optional function arguments
//! * Nullable pointers
//! * Swapping things out of difficult situations
//!
//! [`Option`]s are commonly paired with pattern matching to query the presence
//! of a value and take action, always accounting for the [`None`] case.
//!
//! ```
//! fn divide(numerator: f64, denominator: f64) -> Option<f64> {
//! if denominator == 0.0 {
//! None
//! } else {
//! Some(numerator / denominator)
//! }
//! }
//!
//! // The return value of the function is an option
//! let result = divide(2.0, 3.0);
//!
//! // Pattern match to retrieve the value
//! match result {
//! // The division was valid
//! Some(x) => println!("Result: {}", x),
//! // The division was invalid
//! None => println!("Cannot divide by 0"),
//! }
//! ```
//!
//
// FIXME: Show how `Option` is used in practice, with lots of methods
//
//! # Options and pointers ("nullable" pointers)
//!
//! Rust's pointer types must always point to a valid location; there are
//! no "null" pointers. Instead, Rust has *optional* pointers, like
//! the optional owned box, [`Option`]`<`[`Box<T>`]`>`.
//!
//! The following example uses [`Option`] to create an optional box of
//! [`i32`]. Notice that in order to use the inner [`i32`] value first, the
//! `check_optional` function needs to use pattern matching to
//! determine whether the box has a value (i.e., it is [`Some(...)`][`Some`]) or
//! not ([`None`]).
//!
//! ```
//! let optional = None;
//! check_optional(optional);
//!
//! let optional = Some(Box::new(9000));
//! check_optional(optional);
//!
//! fn check_optional(optional: Option<Box<i32>>) {
//! match optional {
//! Some(ref p) => println!("has value {}", p),
//! None => println!("has no value"),
//! }
//! }
//! ```
//!
//! This usage of [`Option`] to create safe nullable pointers is so
//! common that Rust does special optimizations to make the
//! representation of [`Option`]`<`[`Box<T>`]`>` a single pointer. Optional pointers
//! in Rust are stored as efficiently as any other pointer type.
//!
//! # Examples
//!
//! Basic pattern matching on [`Option`]:
//!
//! ```
//! let msg = Some("howdy");
//!
//! // Take a reference to the contained string
//! if let Some(ref m) = msg {
//! println!("{}", *m);
//! }
//!
//! // Remove the contained string, destroying the Option
//! let unwrapped_msg = msg.unwrap_or("default message");
//! ```
//!
//! Initialize a result to [`None`] before a loop:
//!
//! ```
//! enum Kingdom { Plant(u32, &'static str), Animal(u32, &'static str) }
//!
//! // A list of data to search through.
//! let all_the_big_things = [
//! Kingdom::Plant(250, "redwood"),
//! Kingdom::Plant(230, "noble fir"),
//! Kingdom::Plant(229, "sugar pine"),
//! Kingdom::Animal(25, "blue whale"),
//! Kingdom::Animal(19, "fin whale"),
//! Kingdom::Animal(15, "north pacific right whale"),
//! ];
//!
//! // We're going to search for the name of the biggest animal,
//! // but to start with we've just got `None`.
//! let mut name_of_biggest_animal = None;
//! let mut size_of_biggest_animal = 0;
//! for big_thing in &all_the_big_things {
//! match *big_thing {
//! Kingdom::Animal(size, name) if size > size_of_biggest_animal => {
//! // Now we've found the name of some big animal
//! size_of_biggest_animal = size;
//! name_of_biggest_animal = Some(name);
//! }
//! Kingdom::Animal(..) | Kingdom::Plant(..) => ()
//! }
//! }
//!
//! match name_of_biggest_animal {
//! Some(name) => println!("the biggest animal is {}", name),
//! None => println!("there are no animals :("),
//! }
//! ```
//!
//! [`Option`]: enum.Option.html
//! [`Some`]: enum.Option.html#variant.Some
//! [`None`]: enum.Option.html#variant.None
//! [`Box<T>`]: ../../std/boxed/struct.Box.html
//! [`i32`]: ../../std/primitive.i32.html
#![stable(feature = "rust1", since = "1.0.0")]
use iter::{FromIterator, FusedIterator, TrustedLen};
use {hint, mem, ops::{self, Deref}};
use pin::Pin;
// Note that this is not a lang item per se, but it has a hidden dependency on
// `Iterator`, which is one. The compiler assumes that the `next` method of
// `Iterator` is an enumeration with one type parameter and two variants,
// which basically means it must be `Option`.
/// The `Option` type. See [the module level documentation](index.html) for more.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Option<T> {
/// No value
#[stable(feature = "rust1", since = "1.0.0")]
None,
/// Some value `T`
#[stable(feature = "rust1", since = "1.0.0")]
Some(#[stable(feature = "rust1", since = "1.0.0")] T),
}
/////////////////////////////////////////////////////////////////////////////
// Type implementation
/////////////////////////////////////////////////////////////////////////////
impl<T> Option<T> {
/////////////////////////////////////////////////////////////////////////
// Querying the contained values
/////////////////////////////////////////////////////////////////////////
/// Returns `true` if the option is a [`Some`] value.
///
/// # Examples
///
/// ```
/// let x: Option<u32> = Some(2);
/// assert_eq!(x.is_some(), true);
///
/// let x: Option<u32> = None;
/// assert_eq!(x.is_some(), false);
/// ```
///
/// [`Some`]: #variant.Some
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_some(&self) -> bool {
match *self {
Some(_) => true,
None => false,
}
}
/// Returns `true` if the option is a [`None`] value.
///
/// # Examples
///
/// ```
/// let x: Option<u32> = Some(2);
/// assert_eq!(x.is_none(), false);
///
/// let x: Option<u32> = None;
/// assert_eq!(x.is_none(), true);
/// ```
///
/// [`None`]: #variant.None
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_none(&self) -> bool {
!self.is_some()
}
/////////////////////////////////////////////////////////////////////////
// Adapter for working with references
/////////////////////////////////////////////////////////////////////////
/// Converts from `Option<T>` to `Option<&T>`.
///
/// # Examples
///
/// Convert an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, preserving the original.
/// The [`map`] method takes the `self` argument by value, consuming the original,
/// so this technique uses `as_ref` to first take an `Option` to a reference
/// to the value inside the original.
///
/// [`map`]: enum.Option.html#method.map
/// [`String`]: ../../std/string/struct.String.html
/// [`usize`]: ../../std/primitive.usize.html
///
/// ```
/// let text: Option<String> = Some("Hello, world!".to_string());
/// // First, cast `Option<String>` to `Option<&String>` with `as_ref`,
/// // then consume *that* with `map`, leaving `text` on the stack.
/// let text_length: Option<usize> = text.as_ref().map(|s| s.len());
/// println!("still can print text: {:?}", text);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_ref(&self) -> Option<&T> {
match *self {
Some(ref x) => Some(x),
None => None,
}
}
/// Converts from `Option<T>` to `Option<&mut T>`.
///
/// # Examples
///
/// ```
/// let mut x = Some(2);
/// match x.as_mut() {
/// Some(v) => *v = 42,
/// None => {},
/// }
/// assert_eq!(x, Some(42));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_mut(&mut self) -> Option<&mut T> {
match *self {
Some(ref mut x) => Some(x),
None => None,
}
}
/// Converts from `Pin<&Option<T>>` to `Option<Pin<&T>>`
#[inline]
#[unstable(feature = "pin", issue = "49150")]
pub fn as_pin_ref<'a>(self: Pin<&'a Option<T>>) -> Option<Pin<&'a T>> {
unsafe {
Pin::get_ref(self).as_ref().map(|x| Pin::new_unchecked(x))
}
}
/// Converts from `Pin<&mut Option<T>>` to `Option<Pin<&mut T>>`
#[inline]
#[unstable(feature = "pin", issue = "49150")]
pub fn as_pin_mut<'a>(self: Pin<&'a mut Option<T>>) -> Option<Pin<&'a mut T>> {
unsafe {
Pin::get_mut_unchecked(self).as_mut().map(|x| Pin::new_unchecked(x))
}
}
/////////////////////////////////////////////////////////////////////////
// Getting to contained values
/////////////////////////////////////////////////////////////////////////
/// Unwraps an option, yielding the content of a [`Some`].
///
/// # Panics
///
/// Panics if the value is a [`None`] with a custom panic message provided by
/// `msg`.
///
/// [`Some`]: #variant.Some
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let x = Some("value");
/// assert_eq!(x.expect("the world is ending"), "value");
/// ```
///
/// ```{.should_panic}
/// let x: Option<&str> = None;
/// x.expect("the world is ending"); // panics with `the world is ending`
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn expect(self, msg: &str) -> T {
match self {
Some(val) => val,
None => expect_failed(msg),
}
}
/// Moves the value `v` out of the `Option<T>` if it is [`Some(v)`].
///
/// In general, because this function may panic, its use is discouraged.
/// Instead, prefer to use pattern matching and handle the [`None`]
/// case explicitly.
///
/// # Panics
///
/// Panics if the self value equals [`None`].
///
/// [`Some(v)`]: #variant.Some
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let x = Some("air");
/// assert_eq!(x.unwrap(), "air");
/// ```
///
/// ```{.should_panic}
/// let x: Option<&str> = None;
/// assert_eq!(x.unwrap(), "air"); // fails
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap(self) -> T {
match self {
Some(val) => val,
None => panic!("called `Option::unwrap()` on a `None` value"),
}
}
/// Returns the contained value or a default.
///
/// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
/// the result of a function call, it is recommended to use [`unwrap_or_else`],
/// which is lazily evaluated.
///
/// [`unwrap_or_else`]: #method.unwrap_or_else
///
/// # Examples
///
/// ```
/// assert_eq!(Some("car").unwrap_or("bike"), "car");
/// assert_eq!(None.unwrap_or("bike"), "bike");
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or(self, def: T) -> T {
match self {
Some(x) => x,
None => def,
}
}
/// Returns the contained value or computes it from a closure.
///
/// # Examples
///
/// ```
/// let k = 10;
/// assert_eq!(Some(4).unwrap_or_else(|| 2 * k), 4);
/// assert_eq!(None.unwrap_or_else(|| 2 * k), 20);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or_else<F: FnOnce() -> T>(self, f: F) -> T {
match self {
Some(x) => x,
None => f(),
}
}
/////////////////////////////////////////////////////////////////////////
// Transforming contained values
/////////////////////////////////////////////////////////////////////////
/// Maps an `Option<T>` to `Option<U>` by applying a function to a contained value.
///
/// # Examples
///
/// Convert an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, consuming the original:
///
/// [`String`]: ../../std/string/struct.String.html
/// [`usize`]: ../../std/primitive.usize.html
///
/// ```
/// let maybe_some_string = Some(String::from("Hello, World!"));
/// // `Option::map` takes self *by value*, consuming `maybe_some_string`
/// let maybe_some_len = maybe_some_string.map(|s| s.len());
///
/// assert_eq!(maybe_some_len, Some(13));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> {
match self {
Some(x) => Some(f(x)),
None => None,
}
}
/// Applies a function to the contained value (if any),
/// or returns the provided default (if not).
///
/// # Examples
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.map_or(42, |v| v.len()), 3);
///
/// let x: Option<&str> = None;
/// assert_eq!(x.map_or(42, |v| v.len()), 42);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn map_or<U, F: FnOnce(T) -> U>(self, default: U, f: F) -> U {
match self {
Some(t) => f(t),
None => default,
}
}
/// Applies a function to the contained value (if any),
/// or computes a default (if not).
///
/// # Examples
///
/// ```
/// let k = 21;<|fim▁hole|> ///
/// let x = Some("foo");
/// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 3);
///
/// let x: Option<&str> = None;
/// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 42);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn map_or_else<U, D: FnOnce() -> U, F: FnOnce(T) -> U>(self, default: D, f: F) -> U {
match self {
Some(t) => f(t),
None => default(),
}
}
/// Transforms the `Option<T>` into a [`Result<T, E>`], mapping [`Some(v)`] to
/// [`Ok(v)`] and [`None`] to [`Err(err)`].
///
/// Arguments passed to `ok_or` are eagerly evaluated; if you are passing the
/// result of a function call, it is recommended to use [`ok_or_else`], which is
/// lazily evaluated.
///
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
/// [`Ok(v)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`Err(err)`]: ../../std/result/enum.Result.html#variant.Err
/// [`None`]: #variant.None
/// [`Some(v)`]: #variant.Some
/// [`ok_or_else`]: #method.ok_or_else
///
/// # Examples
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.ok_or(0), Ok("foo"));
///
/// let x: Option<&str> = None;
/// assert_eq!(x.ok_or(0), Err(0));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn ok_or<E>(self, err: E) -> Result<T, E> {
match self {
Some(v) => Ok(v),
None => Err(err),
}
}
/// Transforms the `Option<T>` into a [`Result<T, E>`], mapping [`Some(v)`] to
/// [`Ok(v)`] and [`None`] to [`Err(err())`].
///
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
/// [`Ok(v)`]: ../../std/result/enum.Result.html#variant.Ok
/// [`Err(err())`]: ../../std/result/enum.Result.html#variant.Err
/// [`None`]: #variant.None
/// [`Some(v)`]: #variant.Some
///
/// # Examples
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.ok_or_else(|| 0), Ok("foo"));
///
/// let x: Option<&str> = None;
/// assert_eq!(x.ok_or_else(|| 0), Err(0));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn ok_or_else<E, F: FnOnce() -> E>(self, err: F) -> Result<T, E> {
match self {
Some(v) => Ok(v),
None => Err(err()),
}
}
/////////////////////////////////////////////////////////////////////////
// Iterator constructors
/////////////////////////////////////////////////////////////////////////
/// Returns an iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let x = Some(4);
/// assert_eq!(x.iter().next(), Some(&4));
///
/// let x: Option<u32> = None;
/// assert_eq!(x.iter().next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter { inner: Item { opt: self.as_ref() } }
}
/// Returns a mutable iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let mut x = Some(4);
/// match x.iter_mut().next() {
/// Some(v) => *v = 42,
/// None => {},
/// }
/// assert_eq!(x, Some(42));
///
/// let mut x: Option<u32> = None;
/// assert_eq!(x.iter_mut().next(), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut { inner: Item { opt: self.as_mut() } }
}
/////////////////////////////////////////////////////////////////////////
// Boolean operations on the values, eager and lazy
/////////////////////////////////////////////////////////////////////////
/// Returns [`None`] if the option is [`None`], otherwise returns `optb`.
///
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let x = Some(2);
/// let y: Option<&str> = None;
/// assert_eq!(x.and(y), None);
///
/// let x: Option<u32> = None;
/// let y = Some("foo");
/// assert_eq!(x.and(y), None);
///
/// let x = Some(2);
/// let y = Some("foo");
/// assert_eq!(x.and(y), Some("foo"));
///
/// let x: Option<u32> = None;
/// let y: Option<&str> = None;
/// assert_eq!(x.and(y), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn and<U>(self, optb: Option<U>) -> Option<U> {
match self {
Some(_) => optb,
None => None,
}
}
/// Returns [`None`] if the option is [`None`], otherwise calls `f` with the
/// wrapped value and returns the result.
///
/// Some languages call this operation flatmap.
///
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// fn sq(x: u32) -> Option<u32> { Some(x * x) }
/// fn nope(_: u32) -> Option<u32> { None }
///
/// assert_eq!(Some(2).and_then(sq).and_then(sq), Some(16));
/// assert_eq!(Some(2).and_then(sq).and_then(nope), None);
/// assert_eq!(Some(2).and_then(nope).and_then(sq), None);
/// assert_eq!(None.and_then(sq).and_then(sq), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn and_then<U, F: FnOnce(T) -> Option<U>>(self, f: F) -> Option<U> {
match self {
Some(x) => f(x),
None => None,
}
}
/// Returns `None` if the option is `None`, otherwise calls `predicate`
/// with the wrapped value and returns:
///
/// - `Some(t)` if `predicate` returns `true` (where `t` is the wrapped
/// value), and
/// - `None` if `predicate` returns `false`.
///
/// This function works similar to `Iterator::filter()`. You can imagine
/// the `Option<T>` being an iterator over one or zero elements. `filter()`
/// lets you decide which elements to keep.
///
/// # Examples
///
/// ```rust
/// fn is_even(n: &i32) -> bool {
/// n % 2 == 0
/// }
///
/// assert_eq!(None.filter(is_even), None);
/// assert_eq!(Some(3).filter(is_even), None);
/// assert_eq!(Some(4).filter(is_even), Some(4));
/// ```
#[inline]
#[stable(feature = "option_filter", since = "1.27.0")]
pub fn filter<P: FnOnce(&T) -> bool>(self, predicate: P) -> Self {
if let Some(x) = self {
if predicate(&x) {
return Some(x)
}
}
None
}
/// Returns the option if it contains a value, otherwise returns `optb`.
///
/// Arguments passed to `or` are eagerly evaluated; if you are passing the
/// result of a function call, it is recommended to use [`or_else`], which is
/// lazily evaluated.
///
/// [`or_else`]: #method.or_else
///
/// # Examples
///
/// ```
/// let x = Some(2);
/// let y = None;
/// assert_eq!(x.or(y), Some(2));
///
/// let x = None;
/// let y = Some(100);
/// assert_eq!(x.or(y), Some(100));
///
/// let x = Some(2);
/// let y = Some(100);
/// assert_eq!(x.or(y), Some(2));
///
/// let x: Option<u32> = None;
/// let y = None;
/// assert_eq!(x.or(y), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or(self, optb: Option<T>) -> Option<T> {
match self {
Some(_) => self,
None => optb,
}
}
/// Returns the option if it contains a value, otherwise calls `f` and
/// returns the result.
///
/// # Examples
///
/// ```
/// fn nobody() -> Option<&'static str> { None }
/// fn vikings() -> Option<&'static str> { Some("vikings") }
///
/// assert_eq!(Some("barbarians").or_else(vikings), Some("barbarians"));
/// assert_eq!(None.or_else(vikings), Some("vikings"));
/// assert_eq!(None.or_else(nobody), None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or_else<F: FnOnce() -> Option<T>>(self, f: F) -> Option<T> {
match self {
Some(_) => self,
None => f(),
}
}
/// Returns [`Some`] if exactly one of `self`, `optb` is [`Some`], otherwise returns `None`.
///
/// [`Some`]: #variant.Some
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// #![feature(option_xor)]
///
/// let x = Some(2);
/// let y: Option<u32> = None;
/// assert_eq!(x.xor(y), Some(2));
///
/// let x: Option<u32> = None;
/// let y = Some(2);
/// assert_eq!(x.xor(y), Some(2));
///
/// let x = Some(2);
/// let y = Some(2);
/// assert_eq!(x.xor(y), None);
///
/// let x: Option<u32> = None;
/// let y: Option<u32> = None;
/// assert_eq!(x.xor(y), None);
/// ```
#[inline]
#[unstable(feature = "option_xor", issue = "50512")]
pub fn xor(self, optb: Option<T>) -> Option<T> {
match (self, optb) {
(Some(a), None) => Some(a),
(None, Some(b)) => Some(b),
_ => None,
}
}
/////////////////////////////////////////////////////////////////////////
// Entry-like operations to insert if None and return a reference
/////////////////////////////////////////////////////////////////////////
/// Inserts `v` into the option if it is [`None`], then
/// returns a mutable reference to the contained value.
///
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let mut x = None;
///
/// {
/// let y: &mut u32 = x.get_or_insert(5);
/// assert_eq!(y, &5);
///
/// *y = 7;
/// }
///
/// assert_eq!(x, Some(7));
/// ```
#[inline]
#[stable(feature = "option_entry", since = "1.20.0")]
pub fn get_or_insert(&mut self, v: T) -> &mut T {
match *self {
None => *self = Some(v),
_ => (),
}
match *self {
Some(ref mut v) => v,
None => unsafe { hint::unreachable_unchecked() },
}
}
/// Inserts a value computed from `f` into the option if it is [`None`], then
/// returns a mutable reference to the contained value.
///
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let mut x = None;
///
/// {
/// let y: &mut u32 = x.get_or_insert_with(|| 5);
/// assert_eq!(y, &5);
///
/// *y = 7;
/// }
///
/// assert_eq!(x, Some(7));
/// ```
#[inline]
#[stable(feature = "option_entry", since = "1.20.0")]
pub fn get_or_insert_with<F: FnOnce() -> T>(&mut self, f: F) -> &mut T {
match *self {
None => *self = Some(f()),
_ => (),
}
match *self {
Some(ref mut v) => v,
None => unsafe { hint::unreachable_unchecked() },
}
}
/////////////////////////////////////////////////////////////////////////
// Misc
/////////////////////////////////////////////////////////////////////////
/// Takes the value out of the option, leaving a [`None`] in its place.
///
/// [`None`]: #variant.None
///
/// # Examples
///
/// ```
/// let mut x = Some(2);
/// let y = x.take();
/// assert_eq!(x, None);
/// assert_eq!(y, Some(2));
///
/// let mut x: Option<u32> = None;
/// let y = x.take();
/// assert_eq!(x, None);
/// assert_eq!(y, None);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn take(&mut self) -> Option<T> {
mem::replace(self, None)
}
/// Replaces the actual value in the option by the value given in parameter,
/// returning the old value if present,
/// leaving a [`Some`] in its place without deinitializing either one.
///
/// [`Some`]: #variant.Some
///
/// # Examples
///
/// ```
/// let mut x = Some(2);
/// let old = x.replace(5);
/// assert_eq!(x, Some(5));
/// assert_eq!(old, Some(2));
///
/// let mut x = None;
/// let old = x.replace(3);
/// assert_eq!(x, Some(3));
/// assert_eq!(old, None);
/// ```
#[inline]
#[stable(feature = "option_replace", since = "1.31.0")]
pub fn replace(&mut self, value: T) -> Option<T> {
mem::replace(self, Some(value))
}
}
impl<'a, T: Clone> Option<&'a T> {
/// Maps an `Option<&T>` to an `Option<T>` by cloning the contents of the
/// option.
///
/// # Examples
///
/// ```
/// let x = 12;
/// let opt_x = Some(&x);
/// assert_eq!(opt_x, Some(&12));
/// let cloned = opt_x.cloned();
/// assert_eq!(cloned, Some(12));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn cloned(self) -> Option<T> {
self.map(|t| t.clone())
}
}
impl<'a, T: Clone> Option<&'a mut T> {
/// Maps an `Option<&mut T>` to an `Option<T>` by cloning the contents of the
/// option.
///
/// # Examples
///
/// ```
/// let mut x = 12;
/// let opt_x = Some(&mut x);
/// assert_eq!(opt_x, Some(&mut 12));
/// let cloned = opt_x.cloned();
/// assert_eq!(cloned, Some(12));
/// ```
#[stable(since = "1.26.0", feature = "option_ref_mut_cloned")]
pub fn cloned(self) -> Option<T> {
self.map(|t| t.clone())
}
}
impl<T: Default> Option<T> {
/// Returns the contained value or a default
///
/// Consumes the `self` argument then, if [`Some`], returns the contained
/// value, otherwise if [`None`], returns the [default value] for that
/// type.
///
/// # Examples
///
/// Convert a string to an integer, turning poorly-formed strings
/// into 0 (the default value for integers). [`parse`] converts
/// a string to any other type that implements [`FromStr`], returning
/// [`None`] on error.
///
/// ```
/// let good_year_from_input = "1909";
/// let bad_year_from_input = "190blarg";
/// let good_year = good_year_from_input.parse().ok().unwrap_or_default();
/// let bad_year = bad_year_from_input.parse().ok().unwrap_or_default();
///
/// assert_eq!(1909, good_year);
/// assert_eq!(0, bad_year);
/// ```
///
/// [`Some`]: #variant.Some
/// [`None`]: #variant.None
/// [default value]: ../default/trait.Default.html#tymethod.default
/// [`parse`]: ../../std/primitive.str.html#method.parse
/// [`FromStr`]: ../../std/str/trait.FromStr.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or_default(self) -> T {
match self {
Some(x) => x,
None => Default::default(),
}
}
}
#[unstable(feature = "inner_deref", reason = "newly added", issue = "50264")]
impl<T: Deref> Option<T> {
/// Converts from `&Option<T>` to `Option<&T::Target>`.
///
/// Leaves the original Option in-place, creating a new one with a reference
/// to the original one, additionally coercing the contents via `Deref`.
pub fn deref(&self) -> Option<&T::Target> {
self.as_ref().map(|t| t.deref())
}
}
impl<T, E> Option<Result<T, E>> {
/// Transposes an `Option` of a `Result` into a `Result` of an `Option`.
///
/// `None` will be mapped to `Ok(None)`.
/// `Some(Ok(_))` and `Some(Err(_))` will be mapped to `Ok(Some(_))` and `Err(_)`.
///
/// # Examples
///
/// ```
/// #![feature(transpose_result)]
///
/// #[derive(Debug, Eq, PartialEq)]
/// struct SomeErr;
///
/// let x: Result<Option<i32>, SomeErr> = Ok(Some(5));
/// let y: Option<Result<i32, SomeErr>> = Some(Ok(5));
/// assert_eq!(x, y.transpose());
/// ```
#[inline]
#[unstable(feature = "transpose_result", issue = "47338")]
pub fn transpose(self) -> Result<Option<T>, E> {
match self {
Some(Ok(x)) => Ok(Some(x)),
Some(Err(e)) => Err(e),
None => Ok(None),
}
}
}
// This is a separate function to reduce the code size of .expect() itself.
#[inline(never)]
#[cold]
fn expect_failed(msg: &str) -> ! {
panic!("{}", msg)
}
/////////////////////////////////////////////////////////////////////////////
// Trait implementations
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Option<T> {
/// Returns [`None`][Option::None].
#[inline]
fn default() -> Option<T> { None }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for Option<T> {
type Item = T;
type IntoIter = IntoIter<T>;
/// Returns a consuming iterator over the possibly contained value.
///
/// # Examples
///
/// ```
/// let x = Some("string");
/// let v: Vec<&str> = x.into_iter().collect();
/// assert_eq!(v, ["string"]);
///
/// let x = None;
/// let v: Vec<&str> = x.into_iter().collect();
/// assert!(v.is_empty());
/// ```
#[inline]
fn into_iter(self) -> IntoIter<T> {
IntoIter { inner: Item { opt: self } }
}
}
#[stable(since = "1.4.0", feature = "option_iter")]
impl<'a, T> IntoIterator for &'a Option<T> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.iter()
}
}
#[stable(since = "1.4.0", feature = "option_iter")]
impl<'a, T> IntoIterator for &'a mut Option<T> {
type Item = &'a mut T;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> IterMut<'a, T> {
self.iter_mut()
}
}
#[stable(since = "1.12.0", feature = "option_from")]
impl<T> From<T> for Option<T> {
fn from(val: T) -> Option<T> {
Some(val)
}
}
#[stable(feature = "option_ref_from_ref_option", since = "1.30.0")]
impl<'a, T> From<&'a Option<T>> for Option<&'a T> {
fn from(o: &'a Option<T>) -> Option<&'a T> {
o.as_ref()
}
}
#[stable(feature = "option_ref_from_ref_option", since = "1.30.0")]
impl<'a, T> From<&'a mut Option<T>> for Option<&'a mut T> {
fn from(o: &'a mut Option<T>) -> Option<&'a mut T> {
o.as_mut()
}
}
/////////////////////////////////////////////////////////////////////////////
// The Option Iterators
/////////////////////////////////////////////////////////////////////////////
#[derive(Clone, Debug)]
struct Item<A> {
opt: Option<A>
}
impl<A> Iterator for Item<A> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> {
self.opt.take()
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.opt {
Some(_) => (1, Some(1)),
None => (0, Some(0)),
}
}
}
impl<A> DoubleEndedIterator for Item<A> {
#[inline]
fn next_back(&mut self) -> Option<A> {
self.opt.take()
}
}
impl<A> ExactSizeIterator for Item<A> {}
impl<A> FusedIterator for Item<A> {}
unsafe impl<A> TrustedLen for Item<A> {}
/// An iterator over a reference to the [`Some`] variant of an [`Option`].
///
/// The iterator yields one value if the [`Option`] is a [`Some`], otherwise none.
///
/// This `struct` is created by the [`Option::iter`] function.
///
/// [`Option`]: enum.Option.html
/// [`Some`]: enum.Option.html#variant.Some
/// [`Option::iter`]: enum.Option.html#method.iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Iter<'a, A: 'a> { inner: Item<&'a A> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> Iterator for Iter<'a, A> {
type Item = &'a A;
#[inline]
fn next(&mut self) -> Option<&'a A> { self.inner.next() }
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> DoubleEndedIterator for Iter<'a, A> {
#[inline]
fn next_back(&mut self) -> Option<&'a A> { self.inner.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> ExactSizeIterator for Iter<'_, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<A> FusedIterator for Iter<'_, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<A> TrustedLen for Iter<'_, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Clone for Iter<'_, A> {
#[inline]
fn clone(&self) -> Self {
Iter { inner: self.inner.clone() }
}
}
/// An iterator over a mutable reference to the [`Some`] variant of an [`Option`].
///
/// The iterator yields one value if the [`Option`] is a [`Some`], otherwise none.
///
/// This `struct` is created by the [`Option::iter_mut`] function.
///
/// [`Option`]: enum.Option.html
/// [`Some`]: enum.Option.html#variant.Some
/// [`Option::iter_mut`]: enum.Option.html#method.iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IterMut<'a, A: 'a> { inner: Item<&'a mut A> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> Iterator for IterMut<'a, A> {
type Item = &'a mut A;
#[inline]
fn next(&mut self) -> Option<&'a mut A> { self.inner.next() }
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> DoubleEndedIterator for IterMut<'a, A> {
#[inline]
fn next_back(&mut self) -> Option<&'a mut A> { self.inner.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> ExactSizeIterator for IterMut<'_, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<A> FusedIterator for IterMut<'_, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<A> TrustedLen for IterMut<'_, A> {}
/// An iterator over the value in [`Some`] variant of an [`Option`].
///
/// The iterator yields one value if the [`Option`] is a [`Some`], otherwise none.
///
/// This `struct` is created by the [`Option::into_iter`] function.
///
/// [`Option`]: enum.Option.html
/// [`Some`]: enum.Option.html#variant.Some
/// [`Option::into_iter`]: enum.Option.html#method.into_iter
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<A> { inner: Item<A> }
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Iterator for IntoIter<A> {
type Item = A;
#[inline]
fn next(&mut self) -> Option<A> { self.inner.next() }
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> DoubleEndedIterator for IntoIter<A> {
#[inline]
fn next_back(&mut self) -> Option<A> { self.inner.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> ExactSizeIterator for IntoIter<A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<A> FusedIterator for IntoIter<A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<A> TrustedLen for IntoIter<A> {}
/////////////////////////////////////////////////////////////////////////////
// FromIterator
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, V: FromIterator<A>> FromIterator<Option<A>> for Option<V> {
/// Takes each element in the [`Iterator`]: if it is [`None`][Option::None],
/// no further elements are taken, and the [`None`][Option::None] is
/// returned. Should no [`None`][Option::None] occur, a container with the
/// values of each [`Option`] is returned.
///
/// Here is an example which increments every integer in a vector,
/// checking for overflow:
///
/// ```
/// use std::u16;
///
/// let v = vec![1, 2];
/// let res: Option<Vec<u16>> = v.iter().map(|&x: &u16|
/// if x == u16::MAX { None }
/// else { Some(x + 1) }
/// ).collect();
/// assert!(res == Some(vec![2, 3]));
/// ```
///
/// [`Iterator`]: ../iter/trait.Iterator.html
#[inline]
fn from_iter<I: IntoIterator<Item=Option<A>>>(iter: I) -> Option<V> {
// FIXME(#11084): This could be replaced with Iterator::scan when this
// performance bug is closed.
struct Adapter<Iter> {
iter: Iter,
found_none: bool,
}
impl<T, Iter: Iterator<Item=Option<T>>> Iterator for Adapter<Iter> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
match self.iter.next() {
Some(Some(value)) => Some(value),
Some(None) => {
self.found_none = true;
None
}
None => None,
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.found_none {
(0, Some(0))
} else {
let (_, upper) = self.iter.size_hint();
(0, upper)
}
}
}
let mut adapter = Adapter { iter: iter.into_iter(), found_none: false };
let v: V = FromIterator::from_iter(adapter.by_ref());
if adapter.found_none {
None
} else {
Some(v)
}
}
}
/// The error type that results from applying the try operator (`?`) to a `None` value. If you wish
/// to allow `x?` (where `x` is an `Option<T>`) to be converted into your error type, you can
/// implement `impl From<NoneError>` for `YourErrorType`. In that case, `x?` within a function that
/// returns `Result<_, YourErrorType>` will translate a `None` value into an `Err` result.
#[unstable(feature = "try_trait", issue = "42327")]
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
pub struct NoneError;
#[unstable(feature = "try_trait", issue = "42327")]
impl<T> ops::Try for Option<T> {
type Ok = T;
type Error = NoneError;
#[inline]
fn into_result(self) -> Result<T, NoneError> {
self.ok_or(NoneError)
}
#[inline]
fn from_ok(v: T) -> Self {
Some(v)
}
#[inline]
fn from_error(_: NoneError) -> Self {
None
}
}<|fim▁end|> | |
<|file_name|>1147.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
class Solution:
def longestDecomposition(self, text: str) -> int:
num = 0
L = len(text)
l, r = 0, L - 1
mp1 = defaultdict(int)
mp2 = defaultdict(int)
while l < r:
mp1[text[l]] += 1
mp2[text[r]] += 1
if mp1 == mp2:
num += 2
mp1 = defaultdict(int)
mp2 = defaultdict(int)
l += 1
r -= 1
if not mp1 and not mp2 and l > r:
pass
else:
num += 1
return num
if __name__ == '__main__':<|fim▁hole|> assert Solution().longestDecomposition("antaprezatepzapreanta") == 11
assert Solution().longestDecomposition("aaa") == 3<|fim▁end|> | assert Solution().longestDecomposition("ghiabcdefhelloadamhelloabcdefghi") == 7
assert Solution().longestDecomposition("merchant") == 1 |
<|file_name|>ErrorDAO.js<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
CLASS({
name: 'ErrorDAO',
package: 'foam.core.dao',
extends: 'AbstractDAO',
methods: {
put: function(obj, sink) {
sink && sink.error && sink.error('put', obj);
},
remove: function(obj, sink) {
sink && sink.error && sink.error('remove', obj);
}
}
});<|fim▁end|> | * @license
* Copyright 2015 Google Inc. All Rights Reserved. |
<|file_name|>json.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>var _ = require('lodash'),
jsonFormat = require('json-format'),
grunt = require('grunt');
var util = require('../util/util');
module.exports = {
json: function(data, options, generatedContent, callback){
if(_.isString(options.dest)){
grunt.file.write(options.dest + '/json/' + generatedContent.task + '.json', jsonFormat(generatedContent));
grunt.file.write(options.dest + '/json/content/' + data.uuid + '.json', jsonFormat(data));
}
callback(generatedContent);
}
};
//<|fim▁end|> | |
<|file_name|>user.server.model.test.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var should = require('should'),
mongoose = require('mongoose'),
User = mongoose.model('User');
/**
* Globals
*/
var user, user2;
/**
* Unit tests
*/
describe.skip('User Model Unit Tests:', function() {
before(function(done) {
user = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: '[email protected]',
username: 'username',
password: 'password',
provider: 'local'
});
user2 = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: '[email protected]',
username: 'username',
password: 'password',
provider: 'local'
});
done();
});
describe('Method Save', function() {
it('should begin with no users', function(done) {
User.find({}, function(err, users) {
users.should.have.length(0);
done();
});
});
<|fim▁hole|> it('should be able to save without problems', function(done) {
user.save(done);
});
it('should fail to save an existing user again', function(done) {
user.save();
return user2.save(function(err) {
should.exist(err);
done();
});
});
it('should be able to show an error when try to save without first name', function(done) {
user.firstName = '';
return user.save(function(err) {
should.exist(err);
done();
});
});
});
after(function(done) {
User.remove().exec();
done();
});
});<|fim▁end|> | |
<|file_name|>tartunlp_on_all_albums.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand
from django.db.models import Q
from ajapaik.ajapaik.models import Album
class Command(BaseCommand):
help = 'Connects to TartuNLP API and retrieves neuro machine translations for empty name fields'
def handle(self, *args, **options):
albums = Album.objects.exclude(
Q(atype=Album.AUTO) |
Q(name_original_language__isnull=False) |
Q(atype=Album.PERSON) |
Q(atype=Album.COLLECTION)
).filter(
Q(name_et__isnull=False)<|fim▁hole|> | Q(name_lt__isnull=False)
| Q(name_fi__isnull=False)
| Q(name_ru__isnull=False)
| Q(name_de__isnull=False)
| Q(name_en__isnull=False)
)
for each in albums:
print(f'Processing Album {each.pk}')
each: Album
each.fill_untranslated_fields()<|fim▁end|> | | Q(name_lv__isnull=False) |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|># this is the interface for `python archiver`
import archiver
import appdirs
import os
import sys
import pickle
import json
from archiver.archiver import Archiver
from archiver.parser import parseArgs
args = parseArgs()
from edit import edit
# ==============================================
print args
# TODO: see http://stackoverflow.com/questions/13168083/python-raw-input-replacement-that-uses-a-configurable-text-editor
#-- import pdb
#-- pdb.set_trace()
# ------------------------------------------------------------
# load the user data
# ------------------------------------------------------------
# get the user data directory
user_data_dir = appdirs.user_data_dir('FileArchiver', 'jdthorpe')
if not os.path.exists(user_data_dir) :
os.makedirs(user_data_dir)
# LOAD THE INDEX NAMES AND ACTIVE INDEX
indexes_path = os.path.join(user_data_dir,'INDEXES.json')
if os.path.exists(indexes_path):
with open(indexes_path,'rb') as fh:
indexes = json.load(fh)
else:
indexes= {'active':None,'names':[]}
if not os.path.exists(user_data_dir):
os.makedirs(user_data_dir)
def dumpIndexes():
with open(indexes_path,'wb') as fh:
json.dump(indexes,fh)
# ------------------------------------------------------------
# ------------------------------------------------------------
def getActiveName():
# ACTIVE INDEX NUMER
activeIndex = indexes['active']
if activeIndex is None:
print "No active index. Use 'list -i' to list available indexies and 'use' to set an active index."
sys.exit()
# GET THE NAME OF THE INDEX
try:
activeIndexName = indexes['names'][indexes['active']]
except:
print "Invalid index number"
sys.exit()
return activeIndexName
# ------------------------------------------------------------
# READ-WRITE UTILITY FUNCTIONS
# ------------------------------------------------------------
# TODO: catch specific excepitons:
# except IOError:
# # no such file
# except ValueError as e:
# # invalid json file
def readSettings(name):
""" A utility function which loads the index settings from file
"""
try:
with open(os.path.join(user_data_dir,name+".settings"),'rb') as fh:
settings = json.load(fh)
except Exception as e:
print "Error reading index settings"
import pdb
pdb.set_trace()
sys.exit()
return settings
def readData(name):
""" A utility function which loads the index data from file
"""
try:<|fim▁hole|> import pdb
pdb.set_trace()
sys.exit()
return data
def dumpSettings(settings,name):
""" A utility function which saves the index settings to file
"""
try:
with open(os.path.join(user_data_dir,name+".settings"),'wb') as fh:
json.dump(settings,fh)
except Exception as e:
print "Error writing index settings"
import pdb
pdb.set_trace()
sys.exit()
def dumpData(data,name):
""" A utility function which saves the index settings to file
"""
try:
with open(os.path.join(user_data_dir,name+".data"),'wb') as fh:
pickle.dump(data,fh)
except:
print "Error writing index data"
import pdb
pdb.set_trace()
sys.exit()
# ------------------------------------------------------------
# ------------------------------------------------------------
if args.command == 'add':
activeName = getActiveName()
settings = readSettings(activeName)
if args.source is not None:
source = os.path.abspath(args.source)
if not os.path.exists(source):
print 'WARNING: no such directory "%s"'%(source)
elif not os.path.isdir(source):
print 'ERROR: "%s" is not a directory'%(source)
sys.exit()
print 'Adding source directory: %s'%(source)
if not any(samefile(source,f) for f in settings['sourceDirectories']):
settings['sourceDirectories'].append(source)
elif args.exclusions is not None:
import re
try:
re.compile(args.exclusion)
except re.error:
print 'Invalid regular expression "%s"'%(args.exclusion)
sys.exit()
if args.noic:
settings['directoryExclusionPatterns'].append(args.exclusion)
else:
settings['directoryExclusionPatterns'].append((args.exclusion,2)) # re.I == 2
elif args.archive is not None:
raise NotImplementedError
if settings['archiveDirectory'] is not None:
print "Archive path has already been set use 'remove' to delete the archive path before setting a new archive path"
archiveDirectory = os.path.abspath(args.archive)
if not os.path.exists(archiveDirectory):
if args.create :
os.makedirs(archiveDirectory)
else:
print 'ERROR: no such directory "%s"'%(archiveDirectory)
sys.exit()
elif not os.path.isdir(archiveDirectory):
print '"%s" is not a directory'%(archiveDirectory)
sys.exit()
print 'Setting archive directory to: %s'%(archiveDirectory)
settings['archiveDirectory'] = args.archive
else:
raise NotImplementedError
print 'Error in Arg Parser'
sys.exit()
dumpSettings(settings,activeName)
elif args.command == 'list':
if args.sources:
for f in readSettings(getActiveName())['sourceDirectories']:
print f
elif args.exclusions:
for f in readSettings(getActiveName())['directoryExclusionPatterns']:
print f
elif args.archive:
print readSettings(getActiveName())['archiveDirectory']
elif args.files:
archiver = Archiver()
archiver.data = readData(getActiveName())
for f in archiver:
print f
elif args.indexes:
print 'Active Index: %s (*)'%(getActiveName())
print 'Index Names: '
for i,name in enumerate(indexes['names']):
print ' %s %i: %s'%(
(' ','*')[(i == indexes['active'])+0],
i+1,
name,
)
else:
print 'Error in Arg Parser'
elif args.command == 'remove':
activeName = getActiveName()
settings = readSettings(activeName)
if args.source is not None:
if not (1 <= args.source <= len(settings['sourceDirectories'])):
print 'Invalid index %i'%(args.source)
del settings['sourceDirectories'][args.source - 1]
elif args.exclusion is not None:
raise NotImplementedError
if not (1 <= args.exclusion <= len(settings['directoryExclusionPatterns'])):
print 'Invalid index %i'%(args.exclusion)
del settings['directoryExclusionPatterns'][args.exclusion - 1]
elif args.archive is not None:
raise NotImplementedError
settings['archiveDirectory'] = None
else:
raise NotImplementedError
print 'Error in Arg Parser'
sys.exit()
dumpSettings(settings,activeName)
elif args.command == 'update':
activeName = getActiveName()
settings = readSettings(activeName)
if not len(settings['sourceDirectories']):
print "Error: no source directories in the active index. Please add a source directory via 'add -s'"
archiver = Archiver(
settings = readSettings(activeName),
data = readData(activeName))
archiver.update()
dumpSettings(archiver.settings,activeName)
dumpData(archiver.data,activeName)
elif args.command == 'clean':
raise NotImplementedError
activeName = getActiveName()
archiver = Archiver(
settings = readSettings(activeName),
data = readData(activeName))
archiver.clean()
dumpSettings(archiver.settings,activeName)
dumpData(archiver.data,activeName)
elif args.command == 'copy':
raise NotImplementedError
activeName = getActiveName()
settings = readSettings(activeName),
if settings['archiveDirectory'] is None:
print "ERROR Archive directory not set. Use 'add -a' to set the archive directory."
sys.exit()
Index(
settings = settings,
data = readData(activeName)).copy()
elif args.command == 'diskimages':
raise NotImplementedError
if args.size is None or args.size == "DVD":
size = 4.65*1<<20
elif args.size == "CD":
size = 645*1<<20
elif args.size == "DVD":
size = 4.65*1<<20
elif args.size == "DVD-dual":
size = 8.5*1<<30
elif args.size == "BD":
size = 25*1<<30
elif args.size == "BD-dual":
size = 50*1<<30
elif args.size == "BD-tripple":
size = 75*1<<30
elif args.size == "BD-xl":
size = 100*1<<30
else:
try:
size = int(float(args.size))
except:
print 'ERROR: unable to coerce "%s" to float or int'%(args.size)
sys.exit()
activeName = getActiveName()
settings = readSettings(activeName),
# GET THE DIRECTORY ARGUMENT
if args.directory is not None:
directory = args.directory
else:
if settings['archiveDirectory'] is None:
print "ERROR Archive directory not set and no directory specified. Use 'diskimages -d' to specifiy the disk image directory or 'add -a' to set the archive directory."
sys.exit()
else:
directory = os.path.join(settings['archiveDirectory'],'Disk Images')
# VALIDATE THE DIRECTORY
if not os.path.exists(directory):
if args.create :
os.makedirs(directory)
else:
print 'ERROR: no such directory "%s"'%(directory)
sys.exit()
elif not os.path.isdir(directory):
print '"%s" is not a directory'%(directory)
sys.exit()
# get the FPBF argument
if args.fpbf is not None:
FPBF = True
elif args.nofpbf is not None:
FPBF = False
else:
FPBF = sys.platform == 'darwin'
Index( settings = settings,
data = readData(activeName)).diskimages(directory,size,FPBF)
elif args.command == 'settings':
activeName = getActiveName()
if args.export is not None:
raise NotImplementedError
with open(args.export,'rb') as fh:
json.dump(readSettings(activeName),fh,indent=2,separators=(',', ': '))
elif args.load is not None:
raise NotImplementedError
with open(args.export,'wb') as fh:
settings = json.load(fh)
# give a chance for the settings to be validated
try:
archiver = Archiver(settings=settings)
except:
print "ERROR: invalid settings file"
dumpSettings(archiver.settings,args.name)
elif args.edit is not None:
settings = readSettings(activeName)
old = settings['identifierSettings'][args.edit]
new = edit(json.dumps(old,indent=2,separators=(',', ': ')))
settings['identifierSettings'][args.edit]= json.loads(new)
dumpSettings(settings,activeName)
else :
print json.dumps(readSettings(activeName),indent=2,separators=(',', ': '))
elif args.command == 'create':
if args.name in indexes['names']:
print "An index by the name '%s' already exists"%(args.name)
sys.exit()
import re
validater = re.compile(r'^[-() _a-zA-Z0-9](?:[-() _.a-zA-Z0-9]+[-() _a-zA-Z0-9])$')
if validater.match(args.name) is None:
print "ERROR: names must be composed of letters, numbers, hypen, underscore, space and dot charactes an not end or begin with a dot"
sys.exit()
archiver = Index()
dumpSettings(archiver.settings,args.name)
dumpData(archiver.data,args.name)
indexes['names'].append(args.name)
dumpIndexes()
# TODO: check if there are no other indexies. if so, make the new one active.
print "Created index '%s'"%(args.name)
elif args.command == 'save':
raise NotImplementedError
Index( settings = readSettings(getActiveName()),
data = readData(getActiveName())).save(args.filename)
elif args.command == 'use':
print indexes['names']
if not args.name in indexes['names']:
print "ERROR: No such index named '%s'"%(args.name)
sys.exit()
indexes['active'] =indexes['names'].index(args.name)
dumpIndexes()
elif args.command == 'delete':
if not args.name in indexes['names']:
print "ERROR: No such index named '%s'"%(args.name)
sys.exit()
nameIindex = indexes['names'].index(args.name)
if indexes['active'] == nameIindex:
print 'WARNING: deleting active index'
indexes['active'] = None
del indexes['names'][nameIindex]
dumpIndexes()
else :
print "unknown command %s"%(args.command)<|fim▁end|> | with open(os.path.join(user_data_dir,name+".data"),'rb') as fh: data = pickle.load(fh)
except Exception as e:
print "Error reading index data" |
<|file_name|>postgres.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
mchem.postgres
~~~~~~~~~~~~~~
Functions to build and benchmark PostgreSQL database for comparison.
:copyright: Copyright 2014 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import logging
import time
import click
import numpy as np
import psycopg2
from psycopg2.extensions import AsIs
log = logging.getLogger(__name__)
# Start by creating the database and loading the chembl dump via the command line:
# createdb chembl
# psql chembl < chembl_19.pgdump.sql
@click.group()
@click.option('--db', '-d', default='mchem', envvar='MCHEM_POSTGRES_DB', help='PostgreSQL database name (default: mchem).')
@click.option('--user', '-u', default='root', envvar='MCHEM_POSTGRES_USER', help='PostgreSQL username (default: root).')
@click.option('--password', '-p', default=None, envvar='MCHEM_POSTGRES_PASSWORD', help='PostgreSQL password.')
@click.option('--verbose', '-v', is_flag=True, help='Verbose debug logging.')
@click.help_option('--help', '-h')
@click.pass_context
def cli(ctx, db, user, password, verbose):
"""PostgreSQL command line interface."""
click.echo('Connecting %s@%s' % (user, db))
logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO, format='%(levelname)s: %(message)s')
ctx.obj = psycopg2.connect(database=db, user=user, password=password)
@cli.command()
@click.pass_obj
def load(conn):
"""Build PostgreSQL database."""
cur = conn.cursor()
cur.execute('create extension if not exists rdkit;')
cur.execute('create schema rdk;')
cur.execute('drop table if exists biotherapeutics, drug_mechanism, activities, assays, assay_parameters, compound_records, compound_properties, molecule_hierarchy, ligand_eff, predicted_binding_domains, molecule_synonyms, docs, formulations, molecule_atc_classification cascade;')
cur.execute('select * into rdk.mols from (select molregno,mol_from_ctab(molfile::cstring) m from compound_structures) tmp where m is not null;')
cur.execute('create index molidx on rdk.mols using gist(m);')
cur.execute('alter table rdk.mols add primary key (molregno);')
cur.execute('select molregno, m into rdk.fps from rdk.mols;')
cur.execute('alter table rdk.fps add column m2l512 bfp;')
cur.execute('alter table rdk.fps add column m2l2048 bfp;')
cur.execute('alter table rdk.fps add column m2 sfp;')
cur.execute('alter table rdk.fps add column m3 sfp;')
cur.execute('update rdk.fps set m2 = morgan_fp(m);')
cur.execute('update rdk.fps set m3 = morgan_fp(m, 3);')
cur.execute('set rdkit.morgan_fp_size=2048;')
cur.execute('update rdk.fps set m2l2048 = morganbv_fp(m);')
cur.execute('set rdkit.morgan_fp_size=512;')
cur.execute('update rdk.fps set m2l512 = morganbv_fp(m);')
cur.execute('alter table rdk.fps drop column m;')
cur.execute('create index fps_m2_idx on rdk.fps using gist(m2);')
cur.execute('create index fps_m3_idx on rdk.fps using gist(m3);')
cur.execute('create index fps_m2l2048_idx on rdk.fps using gist(m2l2048);')
cur.execute('create index fps_m2l512_idx on rdk.fps using gist(m2l512);')
cur.execute('alter table rdk.fps add primary key (molregno);')
conn.commit()
cur.close()
conn.close()
@cli.command()
@click.option('--sample', '-s', type=click.File('r'), help='File containing sample ids.')
@click.option('--fp', '-f', default='m2', type=click.Choice(['m2', 'm3', 'm2l2048', 'm2l512', 'm3l2048', 'm3l512']), help='Fingerprint type (default: m2).')
@click.option('--threshold', '-t', default=0.8, help='Tanimoto threshold (default: 0.8).')
@click.pass_obj
def profile(conn, sample, fp, threshold):
    """Time similarity searches for up to 100 sample molecules and log summary stats."""
    cur = conn.cursor()
    mol_ids = sample.read().strip().split('\n')
    times = []
    cur.execute("set rdkit.tanimoto_threshold=%s;", (threshold,))
    for i, mol_id in enumerate(mol_ids[:100]):
        log.debug('Query molecule %s of %s: %s' % (i+1, len(mol_ids), mol_id))
        # ARGH! The CHEMBL ID vs. molregno thing is a nightmare
        # Map public ChEMBL ID to the internal molregno key.
        cur.execute("select entity_id from chembl_id_lookup where chembl_id = %s", (mol_id,))
        molregno = cur.fetchone()[0]
        #cur.execute("select m from rdk.mols where molregno = %s", (molregno,))
        #smiles = cur.fetchone()[0]
        # AsIs interpolates the fingerprint column name verbatim (identifiers
        # cannot be bound parameters); fp is limited by click.Choice above.
        cur.execute("select %s from rdk.fps where molregno = %s", (AsIs(fp), molregno,))
        qfp = cur.fetchone()[0]
        log.debug(mol_id)
        # Time only the similarity search itself.
        start = time.time()
        cur.execute("select molregno from rdk.fps where %s%%%s", (AsIs(fp), qfp,))
        #cur.execute("select molregno from rdk.fps where %s%%morganbv_fp(%s)", (fp, smiles,)) # using smiles
        # results is not inspected; fetching forces full query execution for timing.
        results = cur.fetchall()
        end = time.time()
        times.append(end - start)
    # Save results
    result = {
        'median_time': np.median(times),
        'mean_time': np.mean(times),
        'fp': fp,
        'threshold': threshold
    }
    log.info(result)
    cur.close()
    conn.close()
@cli.command()
@click.option('--sample', '-s', type=click.File('r'), help='File containing sample ids.')
@click.option('--fp', default='m2', type=click.Choice(['m2', 'm3', 'm2l2048', 'm2l512', 'm3l2048', 'm3l512']), help='Fingerprint type (default: m2).')
@click.option('--threshold', default=0.8, help='Similarity search threshold (default 0.8).')
@click.pass_obj
def samplesim(conn, sample, threshold, fp):
    """Perform a similarity search on every molecule in sample and print results."""
    click.echo('Fingerprint: %s, Threshold: %s' % (fp, threshold))
    cur = conn.cursor()
    mol_ids = sample.read().strip().split('\n')
    cur.execute("set rdkit.tanimoto_threshold=%s;", (threshold,))
    for i, mol_id in enumerate(mol_ids[:100]):
        click.echo('Query: %s (%s of %s)' % (mol_id, i+1, len(mol_ids)))
        # Map public ChEMBL ID to the internal molregno key.
        cur.execute("select entity_id from chembl_id_lookup where chembl_id = %s", (mol_id,))
        molregno = cur.fetchone()[0]
        # AsIs interpolates the fingerprint column name (identifiers cannot be
        # bound parameters); fp is limited by click.Choice above.
        cur.execute("select %s from rdk.fps where molregno = %s", (AsIs(fp), molregno,))
        qfp = cur.fetchone()[0]
        cur.execute("select molregno from rdk.fps where %s%%%s", (AsIs(fp), qfp,))
        results = [r[0] for r in cur.fetchall()]
        # Translate each hit back to its public ChEMBL ID for display.
        chembl_ids = []
        for mrn in results:
            cur.execute("select chembl_id from chembl_id_lookup where entity_id = %s and entity_type = 'COMPOUND'", (mrn,))
            chembl_ids.append(cur.fetchone()[0])
        click.echo(chembl_ids)
    cur.close()
    conn.close()
<|file_name|>config.py<|end_file_name|><|fim▁begin|>################################################################################
# This file is part of IMTAphy
# _____________________________________________________________________________
#
# Copyright (C) 2011
# Institute of Communication Networks (LKN)
# Department of Electrical Engineering and Information Technology (EE & IT)
# Technische Universitaet Muenchen
# Arcisstr. 21
# 80333 Muenchen - Germany
# http://www.lkn.ei.tum.de/~jan/imtaphy/index.html
#
# _____________________________________________________________________________
#
# IMTAphy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# IMTAphy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IMTAphy. If not, see <http://www.gnu.org/licenses/>.
#
#################################################################################
import openwns
import openwns.node
import openwns.geometry.position
import imtaphy.Station
import imtaphy.Logger
import imtaphy.Channel
import imtaphy.Pathloss
import imtaphy.Scanner
import imtaphy.LinkManagement
import imtaphy.SCM
import imtaphy.ScenarioSupport
import imtaphy.Antenna
import imtaphy.Logger
import imtaphy.Receiver
import imtaphy.covarianceEstimation
import imtaphy.channelEstimation
import imtaphy.Feedback
import openwns.probebus
from openwns import dB, dBm, fromdB, fromdBm
from openwns.evaluation import *
import math
import random
import ltea.dll.schedulers.downlink
import ltea.dll.schedulers.uplink
import ltea.dll.linkAdaptation.downlink
import ltea.dll.linkAdaptation.uplink
import ltea.evaluation.default
import ltea.helper
simTime = 0.11 # total simulation duration in seconds; choose simTime slightly larger than setting + N*windowSize
windowSize = 0.0750 # window size during which to measure, e.g., throughput
settlingTime = 0.0250 # time at the beginning during which no measurements are taken; windowing starts after settling time
# makes the UEs (see end of file) probe time/frequency samples of the channel gain
# and installs suitable probes
# visualize channels with, e.g. /testConfigs/plotChannel.py output/channelGain_UE3_scmLinkId0_antennaPair00_max.m
dumpChannel = False
# for plotting a scenario view (e.g. SINR / geometry over area)
# enables suitable probes and places mobiles on a uniform grid to sample whole area
plotting = False
# define the resolution for the grid in x and y direction
class probeConfig:
    # Number of probe bins along the x axis of the scenario bounding box.
    xBins = 25
    # Number of probe bins along the y axis of the scenario bounding box.
    yBins = 25
# dumps a trace file of all received uplink and downlink transmissions to the output
# directory; can be viewed with IMTAphyViewer. Disabled by default. See bottom of
# config for further options (e.g. restricting to certain cells for speed/file size reasons)
phyTracing = False
# When running standalone, comment the "from openw..." import
# When running a campaign, uncomment the import statement and comment the 2 lines
# class params:
# pass
# For a campaign, comment the params definitions that are set in the campaign config.
#from openwns.wrowser.simdb.SimConfig import params
class params:
pass
params.fdd = "DL" # "DL", "DUPLEX"
params.scenario = "UMa" # "InH", "UMa", "UMi", "RMa", "SMa"
params.scmLinks = "all" #"serving" "all" or "no"
params.seed = 42
params.fullBuffer = True
if not params.fullBuffer:<|fim▁hole|>params.receiver = "MRC" #"NoFilter" # "MMSE" # "MRC"
params.numBSAntennas = 2
params.numMSAntennas = 2
params.numMSperBS = 10
params.msSpeed = 0 # speed in km/h, negative values (msSpeed < 0) means scenario-specific default speed
params.numULPRBs = 50
params.numDLPRBs = 50
params.feedbackDelay = 6
params.cqiUpdateFrequency = 5
params.dlScheduler = "ProportionalFair" #"ProportionalFair" # "ZF""ProportionalFair" #"PU2RC" # ProportionalFair "RoundRobin"
params.pfAlpha = 0.001 # ProportionalFair scheduling fairness tuner with 0 => greedy, 1 => fair
params.laThreshold = 0 #positive value in dB => more conservative link-adaptation
params.precodingMode = "ClosedLoopCodebookBased" #"SingleAntenna" #"NoPrecoding", "ClosedLoopCodebookBased"
params.fixedPMIs = False # true: assign fixed PMIs to each PRB, see below
params.outdoorOnlyUMiLoS = True # assign UMi LoS probabiltiy on outdoor part of distance only. 3GPP pathgain+geometry assumes False, otherwise True is used
params.powerControl = "calibration" # "calibration" or "3GPPdefault"
params.thresholdUL = 0 # uplink LA offset in dB
params.adaptiveUplinkLA = True
params.bsAntennaConfiguration = "C" #"BASESTATIONITU" # "BASESTATIONITU", or "A", "B", "C", "D", "E" for the corresponding 3GPP configs from 36.814
params.channelEstimation = "perfect" # "thermalNoiseBased", "IandNCovarianceBased" with further parameters see below
params.covarianceEstimation = "perfect"# "Wishart32.829" # "Wishart32.829" "None" "equalDiagonal", "perfect", "gaussianError" and "distinguish" (with further parameters)
params.maxRank = 4 # affectes MMSE only: 0 means determine from min(numRx,numTx) antennas; MRC is rank 1 by default
params.pmis = 5 # 1,2, 3, 4, 5, or 15
numberOfCircles = 1 # tier of cell sites surrounding center site (0: 1 site, 1: 7 sites, 2: 19 sites)
random.seed(params.seed) # this fixes the seed for Python within this config.py
# simulator setup stuff
WNS = openwns.Simulator(simulationModel = openwns.node.NodeSimulationModel())
openwns.setSimulator(WNS)
WNS.maxSimTime = simTime
WNS.rng.seed = params.seed # this fixes the seed for the C++ simulator#
WNS.masterLogger.backtrace.enabled = False
WNS.masterLogger.enabled = True #False
WNS.outputStrategy = openwns.simulator.OutputStrategy.DELETE
WNS.statusWriteInterval = 30 # in seconds
WNS.probesWriteInterval = 3600 # in seconds
######## scenario params ########
wrapAround = True # allows evaluating all cells because it virtually surrounds all cells by all others
msHeight = 1.5 # meters
scenarioConfig = imtaphy.ScenarioSupport.Scenario(params.scenario, numberOfCircles, msHeight)
if plotting:
scenarioConfig.extendBoundingBoxToMultiplesOf(probeConfig.xBins, probeConfig.yBins)
if params.msSpeed < 0:
msSpeedKmh = scenarioConfig.msSpeedKmh
else:
msSpeedKmh = params.msSpeed
if wrapAround and not (params.scenario == 'InH'):
wrapAroundShiftVectors = imtaphy.LinkManagement.computeShiftVectors(scenarioConfig.getInterSiteDistance(), numberOfCircles)
else:
wrapAroundShiftVectors = []
# "scenario" is the variable the wrowser looks for to display the scenario
scenario = scenarioConfig.getBoundingBox()
if params.receiver == "MMSE":
filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
# covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
elif params.receiver == "MMSE-IRC":
filter = imtaphy.Receiver.MMSEFilter(maxRank = params.maxRank)
# covarianceEstimation = imtaphy.covarianceEstimation.Perfect()
elif params.receiver == "MRC":
filter = imtaphy.Receiver.MRCFilter()
# actually, the MRC doees not care about the covariance
# covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
else:
raise Exception("Bad receiver filter option")
#covarianceEstimation = imtaphy.covarianceEstimation.GaussianError(relativeError_dB = 0.0)
#channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(errorPowerRelativeToNoise_dB = 3)
if params.channelEstimation == "perfect":
channelEstimation = None
elif params.channelEstimation == "thermalNoiseBased":
channelEstimation = imtaphy.channelEstimation.ThermalNoiseBasedGaussianError(errorPowerRelativeToNoise_dB = 3)
elif params.channelEstimation == "IandNCovarianceBased":
channelEstimation = imtaphy.channelEstimation.IandNCovarianceBasedGaussianError(gainOverIandN_dB = 10, coloredEstimationError = False)
else:
raise Exception("Bad channel estimation option")
if params.covarianceEstimation == "diagonal":
covarianceEstimation = imtaphy.covarianceEstimation.Diagonal()
elif params.covarianceEstimation == "equalDiagonal":
covarianceEstimation = imtaphy.covarianceEstimation.EqualDiagonal()
elif params.covarianceEstimation == "perfect":
covarianceEstimation = imtaphy.covarianceEstimation.Perfect()
elif params.covarianceEstimation == "gaussianError":
# 0 means error as big as I+N cov itself, negative values mean smaller error
covarianceEstimation = imtaphy.covarianceEstimation.GaussianError(relativeError_dB = 0)
elif params.covarianceEstimation == "Wishart32.829":
covarianceEstimation = imtaphy.covarianceEstimation.WishartModel36829(numberOfSamples = 16)
elif params.covarianceEstimation == "distinguish":
covarianceEstimation = imtaphy.covarianceEstimation.IntraAndInterCellDistinguisher(interCellEstimation = imtaphy.covarianceEstimation.WishartModel36829(numberOfSamples = 16),
intraCellEstimation = None)
else:
raise Exception("Bad covariance estimation option")
ueReceiver = imtaphy.Receiver.LinearReceiver(imtaphy.Logger.Logger(params.receiver), filter = filter, noiseFigure = "7 dB",
channelEstimation = channelEstimation, covarianceEstimation = covarianceEstimation)
eNBreceiver = imtaphy.Receiver.LinearReceiver(imtaphy.Logger.Logger(params.receiver), filter = filter, noiseFigure = "5 dB",
channelEstimation = channelEstimation, covarianceEstimation = covarianceEstimation)
feederLoss = 0 # for wideband calibration, set to 2 dB
pathloss = imtaphy.Pathloss.M2135Pathloss(feederLoss = feederLoss)
classifier = imtaphy.LinkManagement.ITUClassifier(params.scenario, onlyOutdoorDistanceUMi = params.outdoorOnlyUMiLoS)
if params.scmLinks == "no":
scm = imtaphy.SCM.No()
linkManager = imtaphy.LinkManagement.LinkManager(classifier = classifier,
scmLinkCriterion = "none",
handoverMargin = "1 dB",
shiftVectors = wrapAroundShiftVectors,
useSCMforRSRP = False)
else:
scm = imtaphy.SCM.M2135SinglePrecision(logger = imtaphy.Logger.Logger("SCM.M2135"), computeEffectiveAntennaGains = False)
linkManager = imtaphy.LinkManagement.LinkManager(classifier = classifier,
scmLinkCriterion = params.scmLinks,
handoverMargin = "1 dB",
shiftVectors = wrapAroundShiftVectors,
useSCMforRSRP = False)
# in case only DL or UL are used, make sure the other direction does not eat too many simulator resources
if (params.fdd == "DL"):
params.numULPRBs = 0
params.offeredULtrafficBps = 1E-10 # setting it to 0 does not work, triggers division by 0
if (params.fdd == "UL"):
params.numDLPRBs = 0
params.offeredDLtrafficBps = 1E-10 # setting it to 0 does not work, triggers division by 0
spectrum = imtaphy.Spectrum.Spectrum(centerFrequencyUplinkHz = scenarioConfig.centerFreqHz, # TODO: different frequencies for UL/DL?
centerFrequencyDownlinkHz = scenarioConfig.centerFreqHz,
numberOfULPRBs = params.numULPRBs,
numberOfDLPRBs = params.numDLPRBs,
prbBandwidthHz = 180000)
channelConfig = imtaphy.Channel.Channel(pathlossModel = pathloss,
spatialChannelModel = scm,
linkManager = linkManager,
spectrum = spectrum)
if (params.fdd=="DL") or (params.fdd=="DUPLEX"):
if params.dlScheduler == "PU2RC" or params.dlScheduler == "ZF":
if params.pmis == 1:
pmis = [0]
elif params.pmis == 2:
pmis = [0, 1]
elif params.pmis == 3:
pmis = [0, 1, 4]
elif params.pmis == 4:
pmis = [0, 1, 4, 5]
elif params.pmis == 5:
pmis = [0, 1, 4, 5, 12]
else:
pmis = range(16)
openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.PU2RCFeedbackManager(enabled = True,
pmis = pmis,
precodingMode = params.precodingMode,
numPrbsPerSubband = 1,
cqiUpdateFrequency = params.cqiUpdateFrequency, #5,
rankUpdateFrequency = 10,
feedbackTotalDelay = params.feedbackDelay) #6
from openwns.evaluation import *
node = openwns.evaluation.createSourceNode(WNS, "groupSize")
settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
settling.appendChildren(PDF(name = "Group Size",
description = "Group Size",
minXValue = 1,
maxXValue = 4,
resolution = 3))
if params.dlScheduler == "PU2RC":
node = openwns.evaluation.createSourceNode(WNS, "imperfectTransmissionRatio")
settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
settling.appendChildren(PDF(name = "Ratio of imperfect transmission resources",
description = "Ratio of imperfect transmission resources",
minXValue = 0,
maxXValue = 1,
resolution = 200))
node = openwns.evaluation.createSourceNode(WNS, "imperfectRetransmissionRatio")
settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
settling.appendChildren(PDF(name = "Ratio of imperfect retransmission resources",
description = "Ratio of imperfect retransmission resources",
minXValue = 0,
maxXValue = 1,
resolution = 200))
node = openwns.evaluation.createSourceNode(WNS, "initialFillLevel")
settling = node.appendChildren(SettlingTimeGuard(settlingTime=settlingTime))
settling.appendChildren(PDF(name = "Percentage of resources allocated after initial scheduling",
description = "Percentage of resources allocated after initial scheduling",
minXValue = 0,
maxXValue = 1,
resolution = 200))
else:
if params.fixedPMIs:
openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.FixedPMIPRBFeedbackManager(enabled = True,
pmis = range(8),
randomize = False,
fixedRank = 1,
numPrbsPerSubband = 2,
cqiUpdateFrequency = params.cqiUpdateFrequency, #5,
rankUpdateFrequency = 10,
feedbackTotalDelay = params.feedbackDelay) #6
else:
openwns.simulator.OpenWNS.modules.imtaphy.downlinkFeedbackManager = imtaphy.Feedback.LTERel8DownlinkFeedbackManager(enabled = True,
precodingMode = params.precodingMode,
numPrbsPerSubband = 2,
cqiUpdateFrequency = params.cqiUpdateFrequency, #5,
rankUpdateFrequency = 10,
feedbackTotalDelay = params.feedbackDelay) #6
if (params.fdd=="UL") or (params.fdd=="DUPLEX"):
openwns.simulator.OpenWNS.modules.imtaphy.uplinkStatusManager = imtaphy.Feedback.LTERel10UplinkChannelStatusManager(enabled = True,
precodingMode = "NoPrecoding", # or "NoPrecoding" #TODO: make it SingleAntenna
srsUpdateFrequency = 5,
statusDelay = 3) # together with 4 TTIs scheduling delay gives 7 TTIs total delay (cf. 36.814)
# Channel and (currently also) the feedback manager are singletons so we put their configs into the IMTAphy module itself
openwns.simulator.OpenWNS.modules.imtaphy.channelConfig = channelConfig
bsPositions = scenarioConfig.bsPlacer.getPositions()
# The queues should not be too big to avoid using too much memory for storing outgoing packets.
# 75376 is the biggest TB size for a single layer with 110 PRBs (20 MHz spectrum); scale it by the
# number of spatial layers -- limited by the smaller antenna count of the two link ends -- and by
# the fraction of the full 100-PRB bandwidth actually configured:
maxQueueSize = 75376 * min([params.numMSAntennas, params.numBSAntennas]) * params.numDLPRBs / 100
azimuths = scenarioConfig.getAzimuths()
for pos in bsPositions:
for azimuth in azimuths:
# see 3GPP TR 36.814 for downtilt examples
# 12 degrees for UMa / UMi
downtilt = scenarioConfig.downtilt
pos.z = scenarioConfig.bsHeight
if params.scenario == "InH":
antenna = imtaphy.Antenna.Omnidirectional(type = params.bsAntennaConfiguration,
antennaGain = "0 dB", azimuth = math.radians(azimuth), numElements = params.numBSAntennas,
wavelengthMeters = scenarioConfig.wavelengthMeters)
else:
antenna = imtaphy.Antenna.IMTAdvancedAntenna(type = params.bsAntennaConfiguration, azimuth = math.radians(azimuth),
downtilt = downtilt, antennaGain = "17 dB", numElements = params.numBSAntennas,
wavelengthMeters = scenarioConfig.wavelengthMeters)
if params.dlScheduler != "RoundRobin":
# This is an outer-loop link-adaptation module that dynamically adapts an individual threshold per user based on HARQ ACK/NACK feedback
# In the default config it aims at a first attempt BLER of 10%. For very slow speeds, smaller BLER targets and for higher speeds higher BLER targets might perform better
# Adaptive LA usually performs better than static for most schedulers (e.g., PF) but for TD-RR (e.g., in IMT-A calibration) at higher speeds, static is better
linkAdaptation = ltea.dll.linkAdaptation.downlink.BLERadaptive(threshold_dB = params.laThreshold, offsetDelta = 0.03, updateInterval = 5, targetBLER = 0.1,
rel8Ports = 2, rel10Ports = 0)
else: # no dynamic outer-loop link-adaptation, just add a static threshold
linkAdaptation = ltea.dll.linkAdaptation.downlink.SINRThreshold(threshold_dB = params.laThreshold,
rel8Ports = 2, rel10Ports = 0)
if params.dlScheduler == "ProportionalFair":
dlScheduler = ltea.dll.schedulers.downlink.ProportionalFair(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, throughputSmoothing = params.pfAlpha, queueSize = maxQueueSize, syncHARQ = True)
elif params.dlScheduler == "PU2RC":
dlScheduler = ltea.dll.schedulers.downlink.PU2RCScheduler(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, throughputSmoothing = params.pfAlpha, estimateOther="PERFECT", queueSize = maxQueueSize, syncHARQ = False)
elif params.dlScheduler == "ZF":
dlScheduler = ltea.dll.schedulers.downlink.ZFScheduler(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, throughputSmoothing = params.pfAlpha, queueSize = maxQueueSize, syncHARQ = False)
else:
# prbsPerUser<=0 means allocating all PRBs to the user, otherwise only the indicated number per TTI
dlScheduler = ltea.dll.schedulers.downlink.RoundRobin(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, queueSize = maxQueueSize, prbsPerUser = 0, syncHARQ = False)
#dlScheduler = ltea.dll.schedulers.downlink.MultiUserScheduler(linkAdaptation, txPowerdBmPerPRB = scenarioConfig.bsPerPRBTxPowerdBm, throughputSmoothing = params.pfAlpha, queueSize = maxQueueSize, syncHARQ = True)
if params.adaptiveUplinkLA:
linkAdaptationUL = ltea.dll.linkAdaptation.uplink.Adaptive(fastCrossingWeight = 0.01,
longTimeWeight = 0.005,
crossingThreshold = 45,
threshold_dB = params.thresholdUL)
else:
linkAdaptationUL = ltea.dll.linkAdaptation.uplink.SINRThreshold(threshold_dB = params.thresholdUL)
if params.powerControl == "calibration":
alpha = 1.0
P0dBmPerPRB = -106
if params.powerControl == "3GPPdefault": # see 3GPP Self-evaluation methodology and results / assumptions by Tetsushi Abe, slide 27
alpha = 0.8
if params.scenario == "InH":
P0dBmPerPRB = -80.0
if params.scenario == "UMi":
P0dBmPerPRB = -85.0
if params.scenario == "UMa":
P0dBmPerPRB = -83.0
if params.scenario == "RMa":
P0dBmPerPRB = -84.0
ulScheduler = ltea.dll.schedulers.uplink.RoundRobin(linkAdaptation = linkAdaptationUL, alpha = alpha, P0dBmPerPRB = P0dBmPerPRB,
threegppCalibration = True, Ks = 0, prachPeriod = 99999999999, #no MCS-based PowerControl
pathlossEstimationMethod = "WBL")
WNS.simulationModel.nodes.append(ltea.nodes.eNB(pos, antenna, dlScheduler, ulScheduler, eNBreceiver, windowSize, settlingTime, None, fullBuffer = params.fullBuffer))
if params.scenario == "InH":
msPositions = imtaphy.ScenarioSupport.placeMobilesUniformlyRandomlyInRectangle(params.numMSperBS * len(bsPositions),
bsPositions,
scenarioConfig)
else:
if plotting:
msPositions = imtaphy.ScenarioSupport.placeMobilesEquallyInCells(bsPositions,
scenarioConfig,
probeConfig)
else:
msPositions = imtaphy.ScenarioSupport.placeMobilesUniformlyRandomlyInCells(params.numMSperBS * len(bsPositions) * len(azimuths),
bsPositions,
scenarioConfig)
UEs = []
for pos in msPositions:
pos.z = msHeight
directionOfTravel = random.uniform(-math.pi, math.pi)
arrayBroadsideAzimuth = directionOfTravel + math.pi / 2
if arrayBroadsideAzimuth > math.pi:
arrayBroadsideAzimuth -= 2 * math.pi
antenna = imtaphy.Antenna.Omnidirectional(type = "MobileStationITU", antennaGain = "0 dB",
azimuth = arrayBroadsideAzimuth, numElements = params.numMSAntennas,
wavelengthMeters = scenarioConfig.wavelengthMeters)
ulScheduler = None
ulScheduler = ltea.dll.schedulers.uplink.UE(totalTxPowerdBm = scenarioConfig.msTotalTxPowerdBm)
ue = ltea.nodes.UE(pos, msSpeedKmh, directionOfTravel, antenna, ulScheduler, ueReceiver, windowSize, settlingTime, None, fullBuffer = params.fullBuffer)
UEs.append(ue)
WNS.simulationModel.nodes.append(ue)
if not params.fullBuffer:
ltea.helper.createEPCandTraffic(simulator = WNS,
offeredDLtrafficBps = params.offeredDLtrafficBps,
offeredULtrafficBps = params.offeredULtrafficBps,
packetSize = packetSize,
probeWindow = windowSize,
settlingTime = settlingTime,
useTCP = False,
enableLogger = False)
# see 3GPP TS 36.104 Section 5.6 Channel bandwidth
if params.numDLPRBs == 6:
bandwidthDLHz = 1.4e6
elif params.numDLPRBs == 15:
bandwidthDLHz = 3e6
elif params.numDLPRBs == 25:
bandwidthDLHz = 5e6
elif params.numDLPRBs == 50:
bandwidthDLHz = 1e7
elif params.numDLPRBs == 75:
bandwidthDLHz = 1.5e7
elif params.numDLPRBs == 100:
bandwidthDLHz = 2e7
else:
bandwidthDLHz = 1 # won't make sense but...
if params.numULPRBs == 6:
bandwidthULHz = 1.4e6
elif params.numULPRBs == 15:
bandwidthULHz = 3e6
elif params.numULPRBs == 25:
bandwidthULHz = 5e6
elif params.numULPRBs == 50:
bandwidthULHz = 1e7
elif params.numULPRBs == 75:
bandwidthULHz = 1.5e7
elif params.numULPRBs == 100:
bandwidthULHz = 2e7
else:
bandwidthULHz = 1 # won't make sense but...
if plotting:
ltea.evaluation.default.installProbes(WNS, settlingTime = settlingTime, numBSs = len(bsPositions)*len(azimuths), users = params.numMSperBS, bandwidthDL = bandwidthDLHz, bandwidthUL = bandwidthULHz, restrictToBSIds= None, scenarioConfig = scenarioConfig, probeConfig = probeConfig)
else:
ltea.evaluation.default.installProbes(WNS, settlingTime = settlingTime, numBSs = len(bsPositions)*len(azimuths), users = params.numMSperBS, bandwidthDL = bandwidthDLHz, bandwidthUL = bandwidthULHz, restrictToBSIds= None)
if dumpChannel:
dumpUEsNodeIDs = []
for i in range(0, min(9, len(UEs))):
ue = UEs[i]
ue.dll.enableChannelGainProbing()
dumpUEsNodeIDs.append(ue.nodeID)
ltea.evaluation.default.installChannelPlottingProbes(WNS,
params.numMSAntennas * params.numBSAntennas,
params.numDLPRBs,
500, # TTIs
dumpUEsNodeIDs # list of UE node ids to dump the channel for
)
if phyTracing:
import openwns.evaluation
node = openwns.evaluation.createSourceNode(WNS, "phyRxTracing")
json = openwns.evaluation.JSONTrace(key="__json__", description="PhyInterfaceRx Tracing Test")
centralSite = node.appendChildren(Accept(by = 'BSID', ifIn = [1, 2, 3], suffix="")) # only trace inner site (otherwise trace big/slow to open)
ues = centralSite.appendChildren(Accept(by = 'NodeType', ifIn = [1], suffix="DL"))
ues.appendChildren(json)
eNBs = centralSite.appendChildren(Accept(by = 'NodeType', ifIn = [2], suffix="UL"))
eNBs.appendChildren(json)<|fim▁end|> | params.offeredDLtrafficBps = 1E7 #
params.offeredULtrafficBps = 1E7 #
packetSize = 500 # bytes
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7
from dragonnest.settings import STATIC_ROOT
from skills.models import *
from PIL import Image, ImageDraw, ImageFont
import os
# 64-symbol URL-safe alphabet for the build hash: '-', 0-9, A-Z, '_', a-z.
_ASCIIMAP = [chr(n) for n in ([45] + range(48, 58) + range(65, 91) + [95] + range(97, 123))]
# Forward (value -> symbol) and reverse (symbol -> value) lookup tables.
_ALPHABET = dict(zip(range(64), _ASCIIMAP))
_ALPHABET_REVERSE = dict(zip(_ASCIIMAP, range(64)))
# Fonts: small for skill-level badges, large for job title badges.
_FONT = ImageFont.truetype(os.path.join(STATIC_ROOT, 'font', 'DejaVuSansCondensed-Bold.ttf'), 10)
_FONT_TITLE = ImageFont.truetype(os.path.join(STATIC_ROOT, 'font', 'DejaVuSansCondensed-Bold.ttf'), 18)
def hash_(num, length):
    """Encode a non-negative integer into the 64-symbol alphabet.

    Emits 6 bits per character, least-significant group first, and pads
    the result with the zero symbol up to `length` characters.
    """
    chunks = []
    remaining = num
    while remaining != 0:
        chunks.append(_ALPHABET[remaining & 63])
        remaining >>= 6
    return ''.join(chunks).ljust(length, _ALPHABET[0])
def unhash_(msg):
    """Decode a hash_()-encoded string back into its integer value."""
    value = 0
    # The string stores the least-significant 6-bit group first, so walk it
    # from the end and shift previously accumulated bits up each step.
    for char in reversed(msg):
        value = (value << 6) | _ALPHABET_REVERSE[char]
    return value
return result
def unhash_build(msg, jobs):
    """Decode a 60-character build string into a list of (skill, level) tuples.

    msg packs one 5-bit level per skill slot (24 slots per job) into twelve
    5-character base-64 groups.  Skills are looked up by (job, tree_index);
    slots with no matching Skill yield (None, level).
    """
    assert len(msg) == 60
    # Twelve 30-bit integers, one per 5-character group.
    nums = [ unhash_(msg[n:n+5]) for n in range(0, 60, 5) ]
    num_iter = iter(nums)
    job_iter = iter(jobs)
    result = []
    for n in range(24*len(jobs)):
        # Each 30-bit group holds 6 levels; each job owns 24 consecutive slots.
        if n%6 == 0:
            num = num_iter.next()
        if n%24 == 0:
            job = job_iter.next()
        # Extract the 5-bit level for this slot (most-significant level first).
        level = (num>>(25-n%6*5))&31
        try:
            skill = Skill.objects.filter(job=job, tree_index=n%24).get()
        except Skill.DoesNotExist:
            skill = None
        result.append((skill, level))
    return result
return result
def build_img(build_hash, portrait=True):
    """Render a skill build encoded in build_hash as a PIL image.

    build_hash has the form '<skills>.<meta>': <meta> packs the job id
    (bits 7+) and character level (low 7 bits); <skills> is the
    60-character per-skill level string decoded by unhash_build().
    portrait stacks the per-job grids vertically; otherwise side by side.
    """
    num = unhash_(build_hash.split('.')[1])
    assert num > 128
    job = Job.objects.get(id=num>>7)
    level = num&127
    # Collect the job lineage (current job up to the base job), sorted by id.
    jobs = []
    while True:
        jobs.append(job)
        if not job.parent:
            break
        job = job.parent
    jobs.sort(key=lambda x: x.id)
    slevel = unhash_build(build_hash.split('.')[0], jobs)
    iconw = 50 # Size of the skill icon
    iconm = 5 # Icon margin
    titleh = (_FONT_TITLE.getsize('I')[1] + 2* iconm) * 2 # Job title badge height
    gridw = (iconw + iconm) * 4 + iconm # Skill icon grid width (4 columns)
    gridh = (iconw + iconm) * 6 + iconm # Skill icon grid height (6 rows)
    gridm = 15 # margin between grids
    if portrait:
        imgw = gridw
        imgh = len(jobs) * (gridh + titleh + gridm) - gridm
        img = Image.new('RGBA', (imgw, imgh), (0,0,0,0))
    else:
        imgw = len(jobs) * (gridw + gridm) - gridm
        imgh = gridh + titleh
        img = Image.new('RGBA', (imgw, imgh), (0,0,0,0))
    for n in range(len(slevel)):
        if n%24 == 0:
            # Start of a new job grid: compute its origin in the image.
            if portrait:
                x0 = 0
                y0 = (n/24) * (gridh + titleh + gridm)
            else:
                x0 = (n/24) * (gridw + gridm)
                y0 = 0
            # Draw Job Name
            # NOTE(review): assumes the first slot of each grid has a Skill;
            # a None here would raise before the None-check below.
            job = slevel[n][0].job
            job_img = draw_text(job.name, font=_FONT_TITLE)
            w, h = job_img.size
            x = x0 + (gridw - w) / 2
            img.paste(job_img, (x, y0 + iconm), job_img)
        x = x0 + iconm + (n % 4) * (iconm + iconw)
        y = y0 + titleh + (n%24) / 4 * (iconm + iconw)
        if slevel[n][0] is None:
            continue
        # Get icon image path: icon/100 selects the sprite sheet (sheet 0 maps
        # to sheet 1), icon%100 selects the cell within the sheet.
        img_path = slevel[n][0].icon/100
        img_path = 1 if img_path == 0 else img_path
        if slevel[n][1] > 0:
            img_path = os.path.join(STATIC_ROOT, 'img', 'hi', '%d.png'%img_path)
        else:
            img_path = os.path.join(STATIC_ROOT, 'img', 'lo', '%d.png'%img_path)
        # Crop the icon from the imagemap (10 icons per sheet row).
        cropx = iconw*((slevel[n][0].icon%100)%10)
        cropy = iconw*((slevel[n][0].icon%100)/10)
        box = (cropx, cropy, cropx+iconw, cropy+iconw)
        skill_img = Image.open(img_path).convert('RGBA')
        skill_img = skill_img.crop(box)
        img.paste(skill_img, (x,y), skill_img)
        # Draw the skill level badge: current level / max level at this char level.
        msg = '%d/%d' % (slevel[n][1], SkillLevel.objects.filter(skill=slevel[n][0], required_level__lte=level).count())
        badge_img = draw_text(msg)
        w, h = badge_img.size
        img.paste(badge_img, (x+iconw-w,y+iconw-h), badge_img)
    return img
def draw_text(msg, font=_FONT):
    """Render msg on a rounded grey pill badge.

    The badge is drawn 16x oversized and downscaled with antialiasing
    to get smooth round ends, then the text is drawn on top in white.
    """
    text_w, text_h = font.getsize(msg)
    margin = text_h // 2
    scale = 16
    big_w = (text_w + text_h) * scale
    big_h = text_h * 2 * scale
    badge = Image.new('RGBA', (big_w, big_h), (0,0,0,0))
    draw = ImageDraw.Draw(badge)
    # Pill shape: left half-disc, right half-disc, joining rectangle.
    draw.pieslice((0, 0, big_h, big_h), 90, 270, fill='#999999')
    draw.pieslice((big_w - big_h, 0, big_w, big_h), -90, 90, fill='#999999')
    draw.rectangle((big_h // 2, 0, big_w - big_h // 2, big_h), fill='#999999')
    # Downscale to final size, then draw the label.
    badge = badge.resize((text_w + text_h, text_h * 2), Image.ANTIALIAS)
    ImageDraw.Draw(badge).text((margin, margin + 1), msg, font=font, fill='#FFFFFF')
    return badge
img_path = slevel[n][0].icon/100
img_path = 1 if img_path == 0 else img_path |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | mod tagged_index; |
<|file_name|>VersionHandler.py<|end_file_name|><|fim▁begin|>#!/bin/python
import re
import sys
import os
from datetime import date
class VersionHandler:
    """Reads, updates and writes a version file of the form::

        version=<major>.<minor>.<revision>
        build=<n>
        date=<ISO date>

    and can emit a matching C/C++ preprocessor header via write_hpp().
    """

    def __init__(self, file):
        self.file = file
        self.major = 0
        self.minor = 0
        self.revision = 0
        self.build = 1
        # Stamp today's date immediately so self.date always exists.
        self.touch()

    def read(self):
        """Load version information from self.file; exits(1) if unreadable."""
        try:
            with open(self.file, 'r') as f:
                for line in f:
                    self.readline(line)
        except IOError as e:
            print('File not found: %s (%s)' % (self.file, e))
            sys.exit(1)

    def write(self):
        """Persist version, build and date back to self.file; exits(1) on failure."""
        try:
            d = os.path.dirname(self.file)
            # Guard on d: dirname is '' for a bare filename and makedirs('') raises.
            if d and not os.path.exists(d):
                os.makedirs(d)
            with open(self.file, 'w') as f:
                f.write('version=%d.%d.%d\n' % (self.major, self.minor, self.revision))
                f.write('build=%d\n' % (self.build))
                f.write('date=%s\n' % (self.date))
        except IOError as e:
            print('Failed to update: %s (%s)' % (self.file, e))
            sys.exit(1)

    def readline(self, line):
        """Parse a single 'key=value' line; blank lines are ignored."""
        line = line.strip('\r\n\t ')
        if len(line) == 0:
            return
        try:
            m = re.search('(.*)=(.*)$', line)
            if not m:
                print('Failed to parse line: %s' % (line.strip('\n\t ')))
                return
            self.set(m.group(1), m.group(2))
        except IndexError as e:
            print('Failed to parse line: %s (%s)' % (line.strip('\n\t '), e))

    def set(self, k, v):
        """Apply a parsed key/value pair to the corresponding attributes."""
        if k == 'version':
            # \d+ with escaped dots: the previous pattern '(\d).(\d).(\d)' only
            # matched single-digit components and '.' matched any character.
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', v)
            if m:
                (self.major, self.minor, self.revision) = [int(e) for e in m.groups()]
        elif k == 'build':
            self.build = int(v)
        elif k == 'date':
            self.date = v

    def touch(self):
        """Set self.date to today's ISO-formatted date."""
        today = date.today()
        self.date = today.isoformat()

    def version(self):
        """Return the full dotted version string 'major.minor.revision.build'."""
        return '%d.%d.%d.%d' % (self.major, self.minor, self.revision, self.build)

    def datestr(self):
        return '%s' % self.date

    def __str__(self):
        return 'version: %s, date %s' % (self.version(), self.date)

    def __repr__(self):
        return 'version: %s, date %s' % (self.version(), self.date)

    def increment(self, key):
        """Bump the named component, zeroing all less-significant components."""
        if key == 'build':
            self.build += 1
        elif key == 'revision':
            self.revision += 1
            self.build = 0
        elif key == 'minor':
            self.minor += 1
            self.revision = 0
            self.build = 0
        elif key == 'major':
            self.major += 1
            self.minor = 0
            self.revision = 0
            self.build = 0

    def print_version(self):
        print('%d.%d.%d.%d' % (self.major, self.minor, self.revision, self.build))

    def write_hpp(self, file):
        """Write a C/C++ header defining PRODUCTVER / STRPRODUCTVER / STRPRODUCTDATE.

        The include guard is derived from the file name (upper-cased, dots
        replaced by underscores).
        """
        d = os.path.dirname(file)
        if d and not os.path.exists(d):
            os.makedirs(d)
        (ignored, filename) = os.path.split(file)
        name = filename.upper().replace('.', '_')
        with open(file, 'w') as f:
            f.write('#ifndef %s\n' % name)
            f.write('#define %s\n' % name)
            f.write('#define PRODUCTVER %d,%d,%d,%d\n' % (self.major, self.minor, self.revision, self.build))
            f.write('#define STRPRODUCTVER "%d.%d.%d.%d"\n' % (self.major, self.minor, self.revision, self.build))
            f.write('#define STRPRODUCTDATE "%s"\n' % (self.date))
            f.write('#endif // %s\n' % name)
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(append)]
#![feature(arc_unique)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_raw)]
#![feature(box_syntax)]
#![feature(core)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(drain)]
#![feature(hashmap_hasher)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(owned_ascii_ext)]
#![feature(plugin)]
#![feature(rc_unique)]
#![feature(slice_chars)]<|fim▁hole|>
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc="The script crate contains all matters DOM."]
#![plugin(string_cache_plugin)]
#![plugin(plugins)]
#[macro_use]
extern crate log;
#[macro_use] extern crate bitflags;
extern crate core;
extern crate devtools_traits;
extern crate cssparser;
extern crate euclid;
extern crate html5ever;
extern crate encoding;
extern crate fnv;
extern crate hyper;
extern crate ipc_channel;
extern crate js;
extern crate layout_traits;
extern crate libc;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate serde;
extern crate time;
extern crate canvas;
extern crate canvas_traits;
extern crate rand;
#[macro_use]
extern crate profile_traits;
extern crate script_traits;
extern crate selectors;
extern crate smallvec;
extern crate util;
extern crate websocket;
#[macro_use]
extern crate style;
extern crate unicase;
extern crate url;
extern crate uuid;
extern crate string_cache;
extern crate offscreen_gl_context;
extern crate tendril;
pub mod cors;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod parse;
pub mod layout_interface;
mod network_listener;
pub mod page;
pub mod script_task;
mod timers;
pub mod textinput;
pub mod clipboard_provider;
mod devtools;
mod horribly_inefficient_timers;
mod webdriver_handlers;
#[allow(unsafe_code)]
pub fn init() {
unsafe {
assert_eq!(js::jsapi::JS_Init(), 1);
}
}<|fim▁end|> | #![feature(str_utf16)]
#![feature(unicode)]
#![feature(vec_push_all)] |
<|file_name|>configuration.test.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { URI } from 'vs/base/common/uri';
import * as assert from 'assert';
import Severity from 'vs/base/common/severity';
import * as UUID from 'vs/base/common/uuid';
import * as Types from 'vs/base/common/types';
import * as Platform from 'vs/base/common/platform';
import { ValidationStatus } from 'vs/base/common/parsers';
import { ProblemMatcher, FileLocationKind, ProblemPattern, ApplyToKind } from 'vs/workbench/contrib/tasks/common/problemMatcher';
import { WorkspaceFolder, IWorkspace } from 'vs/platform/workspace/common/workspace';
import * as Tasks from 'vs/workbench/contrib/tasks/common/tasks';
import { parse, ParseResult, IProblemReporter, ExternalTaskRunnerConfiguration, CustomTask, TaskConfigSource } from 'vs/workbench/contrib/tasks/common/taskConfiguration';
import { MockContextKeyService } from 'vs/platform/keybinding/test/common/mockKeybindingService';
import { IContext } from 'vs/platform/contextkey/common/contextkey';
import { Workspace } from 'vs/platform/workspace/test/common/testWorkspace';
// Shared fixture: a single-folder workspace used by every test in this file.
const workspaceFolder: WorkspaceFolder = new WorkspaceFolder({
	uri: URI.file('/workspace/folderOne'),
	name: 'folderOne',
	index: 0
});
// Minimal IWorkspace wrapping the folder above.
const workspace: IWorkspace = new Workspace('id', [workspaceFolder]);
// IProblemReporter that records whether the parser emitted any message,
// so tests can assert that parsing was clean (or inspect the last message).
class ProblemReporter implements IProblemReporter {
	private _validationStatus: ValidationStatus = new ValidationStatus();
	// True once any of info/warn/error/fatal has been called.
	public receivedMessage: boolean = false;
	// Most recent message; used for test failure output.
	public lastMessage: string | undefined = undefined;
	public info(message: string): void {
		this.log(message);
	}
	public warn(message: string): void {
		this.log(message);
	}
	public error(message: string): void {
		this.log(message);
	}
	public fatal(message: string): void {
		this.log(message);
	}
	public get status(): ValidationStatus {
		return this._validationStatus;
	}
	// All severities funnel here; only receipt and last message are tracked.
	private log(message: string): void {
		this.receivedMessage = true;
		this.lastMessage = message;
	}
}
// Root builder for the expected Tasks.Task list of a test.
// (Note: the misspelling "ConfiguationBuilder" is long-standing and is
// referenced throughout this file, so it is kept as-is.)
class ConfiguationBuilder {
	public result: Tasks.Task[];
	private builders: CustomTaskBuilder[];
	constructor() {
		this.result = [];
		this.builders = [];
	}
	// Start a new expected custom task; returns its builder for chaining.
	public task(name: string, command: string): CustomTaskBuilder {
		let builder = new CustomTaskBuilder(this, name, command);
		this.builders.push(builder);
		this.result.push(builder.result);
		return builder;
	}
	// Finalize every task builder (resolves '$name' placeholder args).
	public done(): void {
		for (let builder of this.builders) {
			builder.done();
		}
	}
}
// Builder for expected Tasks.PresentationOptions; starts from the defaults
// the parser produces (no echo, always reveal, shared panel, ...).
class PresentationBuilder {
	public result: Tasks.PresentationOptions;
	constructor(public parent: CommandConfigurationBuilder) {
		this.result = { echo: false, reveal: Tasks.RevealKind.Always, revealProblems: Tasks.RevealProblemKind.Never, focus: false, panel: Tasks.PanelKind.Shared, showReuseMessage: true, clear: false, close: false };
	}
	public echo(value: boolean): PresentationBuilder {
		this.result.echo = value;
		return this;
	}
	public reveal(value: Tasks.RevealKind): PresentationBuilder {
		this.result.reveal = value;
		return this;
	}
	public focus(value: boolean): PresentationBuilder {
		this.result.focus = value;
		return this;
	}
	public instance(value: Tasks.PanelKind): PresentationBuilder {
		this.result.panel = value;
		return this;
	}
	public showReuseMessage(value: boolean): PresentationBuilder {
		this.result.showReuseMessage = value;
		return this;
	}
	public close(value: boolean): PresentationBuilder {
		this.result.close = value;
		return this;
	}
	// Nothing to resolve for presentation options.
	public done(): void {
	}
}
class CommandConfigurationBuilder {
public result: Tasks.CommandConfiguration;
private presentationBuilder: PresentationBuilder;
constructor(public parent: CustomTaskBuilder, command: string) {
this.presentationBuilder = new PresentationBuilder(this);
this.result = {
name: command,
runtime: Tasks.RuntimeType.Process,
args: [],
options: {
cwd: '${workspaceFolder}'
},
presentation: this.presentationBuilder.result,
suppressTaskName: false
};
}
public name(value: string): CommandConfigurationBuilder {
this.result.name = value;
return this;
}
public runtime(value: Tasks.RuntimeType): CommandConfigurationBuilder {
this.result.runtime = value;
return this;
}
public args(value: string[]): CommandConfigurationBuilder {
this.result.args = value;
return this;
}
public options(value: Tasks.CommandOptions): CommandConfigurationBuilder {
this.result.options = value;
return this;
}
public taskSelector(value: string): CommandConfigurationBuilder {
this.result.taskSelector = value;<|fim▁hole|> return this;
}
public suppressTaskName(value: boolean): CommandConfigurationBuilder {
this.result.suppressTaskName = value;
return this;
}
public presentation(): PresentationBuilder {
return this.presentationBuilder;
}
public done(taskName: string): void {
this.result.args = this.result.args!.map(arg => arg === '$name' ? taskName : arg);
this.presentationBuilder.done();
}
}
// Builder for a single expected Tasks.CustomTask (workspace-sourced,
// with sensible defaults for identifier/background/promptOnClose).
class CustomTaskBuilder {
	public result: Tasks.CustomTask;
	private commandBuilder: CommandConfigurationBuilder;
	constructor(public parent: ConfiguationBuilder, name: string, command: string) {
		this.commandBuilder = new CommandConfigurationBuilder(this, command);
		this.result = new Tasks.CustomTask(
			name,
			{ kind: Tasks.TaskSourceKind.Workspace, label: 'workspace', config: { workspaceFolder: workspaceFolder, element: undefined, index: -1, file: '.vscode/tasks.json' } },
			name,
			Tasks.CUSTOMIZED_TASK_TYPE,
			this.commandBuilder.result,
			false,
			{ reevaluateOnRerun: true },
			{
				identifier: name,
				name: name,
				isBackground: false,
				promptOnClose: true,
				problemMatchers: [],
			}
		);
	}
	public identifier(value: string): CustomTaskBuilder {
		this.result.configurationProperties.identifier = value;
		return this;
	}
	public group(value: string | Tasks.TaskGroup): CustomTaskBuilder {
		this.result.configurationProperties.group = value;
		return this;
	}
	public isBackground(value: boolean): CustomTaskBuilder {
		this.result.configurationProperties.isBackground = value;
		return this;
	}
	public promptOnClose(value: boolean): CustomTaskBuilder {
		this.result.configurationProperties.promptOnClose = value;
		return this;
	}
	// Appends a new problem matcher expectation and returns its builder.
	public problemMatcher(): ProblemMatcherBuilder {
		let builder = new ProblemMatcherBuilder(this);
		this.result.configurationProperties.problemMatchers!.push(builder.result);
		return builder;
	}
	public command(): CommandConfigurationBuilder {
		return this.commandBuilder;
	}
	// Finalizes the command (resolves '$name' args against this task's name).
	public done(): void {
		this.commandBuilder.done(this.result.configurationProperties.name!);
	}
}
// Builder for an expected ProblemMatcher. A fresh UUID is used as the
// default owner so assertProblemMatcher can recognize "auto-generated owner".
class ProblemMatcherBuilder {
	public static readonly DEFAULT_UUID = UUID.generateUuid();
	public result: ProblemMatcher;
	constructor(public parent: CustomTaskBuilder) {
		this.result = {
			owner: ProblemMatcherBuilder.DEFAULT_UUID,
			applyTo: ApplyToKind.allDocuments,
			severity: undefined,
			fileLocation: FileLocationKind.Relative,
			filePrefix: '${workspaceFolder}',
			pattern: undefined!
		};
	}
	public owner(value: string): ProblemMatcherBuilder {
		this.result.owner = value;
		return this;
	}
	public applyTo(value: ApplyToKind): ProblemMatcherBuilder {
		this.result.applyTo = value;
		return this;
	}
	public severity(value: Severity): ProblemMatcherBuilder {
		this.result.severity = value;
		return this;
	}
	public fileLocation(value: FileLocationKind): ProblemMatcherBuilder {
		this.result.fileLocation = value;
		return this;
	}
	public filePrefix(value: string): ProblemMatcherBuilder {
		this.result.filePrefix = value;
		return this;
	}
	// Only the FIRST pattern added becomes this matcher's pattern; later
	// calls still return a builder but do not overwrite it.
	public pattern(regExp: RegExp): PatternBuilder {
		let builder = new PatternBuilder(this, regExp);
		if (!this.result.pattern) {
			this.result.pattern = builder.result;
		}
		return builder;
	}
}
// Builder for an expected ProblemPattern. Defaults mirror the parser's
// implicit pattern: file=1, line=2, character=3, message=0.
class PatternBuilder {
	public result: ProblemPattern;
	constructor(public parent: ProblemMatcherBuilder, regExp: RegExp) {
		this.result = {
			regexp: regExp,
			file: 1,
			message: 0,
			line: 2,
			character: 3
		};
	}
	public file(value: number): PatternBuilder {
		this.result.file = value;
		return this;
	}
	public message(value: number): PatternBuilder {
		this.result.message = value;
		return this;
	}
	public location(value: number): PatternBuilder {
		this.result.location = value;
		return this;
	}
	public line(value: number): PatternBuilder {
		this.result.line = value;
		return this;
	}
	public character(value: number): PatternBuilder {
		this.result.character = value;
		return this;
	}
	public endLine(value: number): PatternBuilder {
		this.result.endLine = value;
		return this;
	}
	public endCharacter(value: number): PatternBuilder {
		this.result.endCharacter = value;
		return this;
	}
	public code(value: number): PatternBuilder {
		this.result.code = value;
		return this;
	}
	public severity(value: number): PatternBuilder {
		this.result.severity = value;
		return this;
	}
	public loop(value: boolean): PatternBuilder {
		this.result.loop = value;
		return this;
	}
}
// Context key service whose every key evaluates to true, so conditionally
// enabled task properties are always active during parsing in these tests.
class TasksMockContextKeyService extends MockContextKeyService {
	public override getContext(domNode: HTMLElement): IContext {
		return {
			getValue: <T>(_key: string) => {
				return <T><unknown>true;
			}
		};
	}
}
function testDefaultProblemMatcher(external: ExternalTaskRunnerConfiguration, resolved: number) {
let reporter = new ProblemReporter();
let result = parse(workspaceFolder, workspace, Platform.platform, external, reporter, TaskConfigSource.TasksJson, new TasksMockContextKeyService());
assert.ok(!reporter.receivedMessage);
assert.strictEqual(result.custom.length, 1);
let task = result.custom[0];
assert.ok(task);
assert.strictEqual(task.configurationProperties.problemMatchers!.length, resolved);
}
function testConfiguration(external: ExternalTaskRunnerConfiguration, builder: ConfiguationBuilder): void {
builder.done();
let reporter = new ProblemReporter();
let result = parse(workspaceFolder, workspace, Platform.platform, external, reporter, TaskConfigSource.TasksJson, new TasksMockContextKeyService());
if (reporter.receivedMessage) {
assert.ok(false, reporter.lastMessage);
}
assertConfiguration(result, builder.result);
}
// Groups tasks by task-group id so actual vs. expected group membership can
// be compared independent of ordering.
class TaskGroupMap {
	private _store: { [key: string]: Tasks.Task[] };
	constructor() {
		this._store = Object.create(null);
	}
	public add(group: string, task: Tasks.Task): void {
		let tasks = this._store[group];
		if (!tasks) {
			tasks = [];
			this._store[group] = tasks;
		}
		tasks.push(task);
	}
	public static assert(actual: TaskGroupMap, expected: TaskGroupMap): void {
		let actualKeys = Object.keys(actual._store);
		let expectedKeys = Object.keys(expected._store);
		if (actualKeys.length === 0 && expectedKeys.length === 0) {
			return;
		}
		assert.strictEqual(actualKeys.length, expectedKeys.length);
		actualKeys.forEach(key => assert.ok(expected._store[key]));
		// Fixed: this line previously only evaluated `actual._store[key]`
		// without asserting, making the reverse containment check a no-op.
		expectedKeys.forEach(key => assert.ok(actual._store[key]));
		actualKeys.forEach((key) => {
			let actualTasks = actual._store[key];
			let expectedTasks = expected._store[key];
			assert.strictEqual(actualTasks.length, expectedTasks.length);
			if (actualTasks.length === 1) {
				assert.strictEqual(actualTasks[0].configurationProperties.name, expectedTasks[0].configurationProperties.name);
				return;
			}
			// Compare as a set of names since ordering is not guaranteed.
			let expectedTaskMap: { [key: string]: boolean } = Object.create(null);
			expectedTasks.forEach(task => expectedTaskMap[task.configurationProperties.name!] = true);
			actualTasks.forEach(task => delete expectedTaskMap[task.configurationProperties.name!]);
			assert.strictEqual(Object.keys(expectedTaskMap).length, 0);
		});
	}
}
// Compares the parsed custom tasks against the expected tasks, matching by
// task name (not id), and compares task-group membership via TaskGroupMap.
function assertConfiguration(result: ParseResult, expected: Tasks.Task[]): void {
	assert.ok(result.validationStatus.isOK());
	let actual = result.custom;
	assert.strictEqual(typeof actual, typeof expected);
	if (!actual) {
		return;
	}
	// We can't compare Ids since the parser uses UUID which are random
	// So create a new map using the name.
	let actualTasks: { [key: string]: Tasks.Task } = Object.create(null);
	let actualId2Name: { [key: string]: string } = Object.create(null);
	let actualTaskGroups = new TaskGroupMap();
	actual.forEach(task => {
		// Names must be unique on the actual side.
		assert.ok(!actualTasks[task.configurationProperties.name!]);
		actualTasks[task.configurationProperties.name!] = task;
		actualId2Name[task._id] = task.configurationProperties.name!;
		let taskId = Tasks.TaskGroup.from(task.configurationProperties.group)?._id;
		if (taskId) {
			actualTaskGroups.add(taskId, task);
		}
	});
	let expectedTasks: { [key: string]: Tasks.Task } = Object.create(null);
	let expectedTaskGroup = new TaskGroupMap();
	expected.forEach(task => {
		// Names must be unique on the expected side too.
		assert.ok(!expectedTasks[task.configurationProperties.name!]);
		expectedTasks[task.configurationProperties.name!] = task;
		let taskId = Tasks.TaskGroup.from(task.configurationProperties.group)?._id;
		if (taskId) {
			expectedTaskGroup.add(taskId, task);
		}
	});
	let actualKeys = Object.keys(actualTasks);
	assert.strictEqual(actualKeys.length, expected.length);
	actualKeys.forEach((key) => {
		let actualTask = actualTasks[key];
		let expectedTask = expectedTasks[key];
		assert.ok(expectedTask);
		assertTask(actualTask, expectedTask);
	});
	TaskGroupMap.assert(actualTaskGroups, expectedTaskGroup);
}
// Asserts that two tasks agree on name, command configuration (unless
// in-memory), background/promptOnClose flags, problem matchers and group.
function assertTask(actual: Tasks.Task, expected: Tasks.Task) {
	assert.ok(actual._id);
	assert.strictEqual(actual.configurationProperties.name, expected.configurationProperties.name, 'name');
	// In-memory tasks carry no command configuration to compare.
	if (!Tasks.InMemoryTask.is(actual) && !Tasks.InMemoryTask.is(expected)) {
		assertCommandConfiguration(actual.command, expected.command);
	}
	assert.strictEqual(actual.configurationProperties.isBackground, expected.configurationProperties.isBackground, 'isBackground');
	assert.strictEqual(typeof actual.configurationProperties.problemMatchers, typeof expected.configurationProperties.problemMatchers);
	assert.strictEqual(actual.configurationProperties.promptOnClose, expected.configurationProperties.promptOnClose, 'promptOnClose');
	assert.strictEqual(typeof actual.configurationProperties.group, typeof expected.configurationProperties.group, `group types unequal`);
	if (actual.configurationProperties.problemMatchers && expected.configurationProperties.problemMatchers) {
		assert.strictEqual(actual.configurationProperties.problemMatchers.length, expected.configurationProperties.problemMatchers.length);
		for (let i = 0; i < actual.configurationProperties.problemMatchers.length; i++) {
			assertProblemMatcher(actual.configurationProperties.problemMatchers[i], expected.configurationProperties.problemMatchers[i]);
		}
	}
	if (actual.configurationProperties.group && expected.configurationProperties.group) {
		// A group may be either a plain string or a TaskGroup object.
		if (Types.isString(actual.configurationProperties.group)) {
			assert.strictEqual(actual.configurationProperties.group, expected.configurationProperties.group);
		} else {
			assertGroup(actual.configurationProperties.group as Tasks.TaskGroup, expected.configurationProperties.group as Tasks.TaskGroup);
		}
	}
}
// Asserts that two command configurations agree on presentation, name,
// runtime, task-name suppression, selector, args and options (cwd/env).
function assertCommandConfiguration(actual: Tasks.CommandConfiguration, expected: Tasks.CommandConfiguration) {
	assert.strictEqual(typeof actual, typeof expected);
	if (actual && expected) {
		assertPresentation(actual.presentation!, expected.presentation!);
		assert.strictEqual(actual.name, expected.name, 'name');
		assert.strictEqual(actual.runtime, expected.runtime, 'runtime type');
		assert.strictEqual(actual.suppressTaskName, expected.suppressTaskName, 'suppressTaskName');
		assert.strictEqual(actual.taskSelector, expected.taskSelector, 'taskSelector');
		assert.deepStrictEqual(actual.args, expected.args, 'args');
		assert.strictEqual(typeof actual.options, typeof expected.options);
		if (actual.options && expected.options) {
			assert.strictEqual(actual.options.cwd, expected.options.cwd, 'cwd');
			assert.strictEqual(typeof actual.options.env, typeof expected.options.env, 'env');
			if (actual.options.env && expected.options.env) {
				assert.deepStrictEqual(actual.options.env, expected.options.env, 'env');
			}
		}
	}
}
// Asserts that two task groups share the same id and default flag.
// Comparison is skipped when either side is undefined (type equality
// is still checked first).
function assertGroup(actual: Tasks.TaskGroup, expected: Tasks.TaskGroup) {
	assert.strictEqual(typeof actual, typeof expected);
	if (!actual || !expected) {
		return;
	}
	assert.strictEqual(actual._id, expected._id, `group ids unequal. actual: ${actual._id} expected ${expected._id}`);
	assert.strictEqual(actual.isDefault, expected.isDefault, `group defaults unequal. actual: ${actual.isDefault} expected ${expected.isDefault}`);
}
// Asserts the presentation options agree on echo and reveal. Other fields
// are intentionally not compared here.
function assertPresentation(actual: Tasks.PresentationOptions, expected: Tasks.PresentationOptions) {
	assert.strictEqual(typeof actual, typeof expected);
	if (!actual || !expected) {
		return;
	}
	assert.strictEqual(actual.echo, expected.echo);
	assert.strictEqual(actual.reveal, expected.reveal);
}
// Asserts two problem matchers are equivalent. Matchers may be references
// (strings) or full objects; an expected owner of DEFAULT_UUID means the
// actual owner only needs to be *some* UUID (it is generated at parse time).
function assertProblemMatcher(actual: string | ProblemMatcher, expected: string | ProblemMatcher) {
	assert.strictEqual(typeof actual, typeof expected);
	if (typeof actual === 'string' && typeof expected === 'string') {
		assert.strictEqual(actual, expected, 'Problem matcher references are different');
		return;
	}
	if (typeof actual !== 'string' && typeof expected !== 'string') {
		if (expected.owner === ProblemMatcherBuilder.DEFAULT_UUID) {
			assert.ok(UUID.isUUID(actual.owner), 'Owner must be a UUID');
		} else {
			assert.strictEqual(actual.owner, expected.owner);
		}
		assert.strictEqual(actual.applyTo, expected.applyTo);
		assert.strictEqual(actual.severity, expected.severity);
		assert.strictEqual(actual.fileLocation, expected.fileLocation);
		assert.strictEqual(actual.filePrefix, expected.filePrefix);
		if (actual.pattern && expected.pattern) {
			assertProblemPatterns(actual.pattern, expected.pattern);
		}
	}
}
function assertProblemPatterns(actual: ProblemPattern | ProblemPattern[], expected: ProblemPattern | ProblemPattern[]) {
assert.strictEqual(typeof actual, typeof expected);
if (Array.isArray(actual)) {
let actuals = <ProblemPattern[]>actual;
let expecteds = <ProblemPattern[]>expected;
assert.strictEqual(actuals.length, expecteds.length);
for (let i = 0; i < actuals.length; i++) {
assertProblemPattern(actuals[i], expecteds[i]);
}
} else {
assertProblemPattern(<ProblemPattern>actual, <ProblemPattern>expected);
}
}
// Field-by-field comparison of a single problem pattern. When the expected
// pattern uses `location`, the line/character/end fields are not compared
// (they are mutually exclusive representations).
function assertProblemPattern(actual: ProblemPattern, expected: ProblemPattern) {
	// Regexps compare by source text, not identity.
	assert.strictEqual(actual.regexp.toString(), expected.regexp.toString());
	assert.strictEqual(actual.file, expected.file);
	assert.strictEqual(actual.message, expected.message);
	if (typeof expected.location !== 'undefined') {
		assert.strictEqual(actual.location, expected.location);
	} else {
		assert.strictEqual(actual.line, expected.line);
		assert.strictEqual(actual.character, expected.character);
		assert.strictEqual(actual.endLine, expected.endLine);
		assert.strictEqual(actual.endCharacter, expected.endCharacter);
	}
	assert.strictEqual(actual.code, expected.code);
	assert.strictEqual(actual.severity, expected.severity);
	assert.strictEqual(actual.loop, expected.loop);
}
suite('Tasks version 0.1.0', () => {
test('tasks: all default', () => {
let builder = new ConfiguationBuilder();
builder.task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true);
testConfiguration(
{
version: '0.1.0',
command: 'tsc'
}, builder);
});
test('tasks: global isShellCommand', () => {
let builder = new ConfiguationBuilder();
builder.task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
runtime(Tasks.RuntimeType.Shell);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
isShellCommand: true
},
builder);
});
test('tasks: global show output silent', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
presentation().reveal(Tasks.RevealKind.Silent);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
showOutput: 'silent'
},
builder
);
});
test('tasks: global promptOnClose default', () => {
let builder = new ConfiguationBuilder();
builder.task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
promptOnClose: true
},
builder
);
});
test('tasks: global promptOnClose', () => {
let builder = new ConfiguationBuilder();
builder.task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
promptOnClose(false).
command().suppressTaskName(true);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
promptOnClose: false
},
builder
);
});
test('tasks: global promptOnClose default watching', () => {
let builder = new ConfiguationBuilder();
builder.task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
isBackground(true).
promptOnClose(false).
command().suppressTaskName(true);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
isWatching: true
},
builder
);
});
test('tasks: global show output never', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
presentation().reveal(Tasks.RevealKind.Never);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
showOutput: 'never'
},
builder
);
});
test('tasks: global echo Command', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
presentation().
echo(true);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
echoCommand: true
},
builder
);
});
test('tasks: global args', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
args(['--p']);
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
args: [
'--p'
]
},
builder
);
});
test('tasks: options - cwd', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
options({
cwd: 'myPath'
});
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
options: {
cwd: 'myPath'
}
},
builder
);
});
test('tasks: options - env', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
options({ cwd: '${workspaceFolder}', env: { key: 'value' } });
testConfiguration(
{
version: '0.1.0',
command: 'tsc',
options: {
env: {
key: 'value'
}
}
},
builder
);
});
test('tasks: os windows', () => {
let name: string = Platform.isWindows ? 'tsc.win' : 'tsc';
let builder = new ConfiguationBuilder();
builder.
task(name, name).
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
windows: {
command: 'tsc.win'
}
};
testConfiguration(external, builder);
});
test('tasks: os windows & global isShellCommand', () => {
let name: string = Platform.isWindows ? 'tsc.win' : 'tsc';
let builder = new ConfiguationBuilder();
builder.
task(name, name).
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
runtime(Tasks.RuntimeType.Shell);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
isShellCommand: true,
windows: {
command: 'tsc.win'
}
};
testConfiguration(external, builder);
});
test('tasks: os mac', () => {
let name: string = Platform.isMacintosh ? 'tsc.osx' : 'tsc';
let builder = new ConfiguationBuilder();
builder.
task(name, name).
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
osx: {
command: 'tsc.osx'
}
};
testConfiguration(external, builder);
});
test('tasks: os linux', () => {
let name: string = Platform.isLinux ? 'tsc.linux' : 'tsc';
let builder = new ConfiguationBuilder();
builder.
task(name, name).
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
linux: {
command: 'tsc.linux'
}
};
testConfiguration(external, builder);
});
test('tasks: overwrite showOutput', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
presentation().reveal(Platform.isWindows ? Tasks.RevealKind.Always : Tasks.RevealKind.Never);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
showOutput: 'never',
windows: {
showOutput: 'always'
}
};
testConfiguration(external, builder);
});
test('tasks: overwrite echo Command', () => {
let builder = new ConfiguationBuilder();
builder.
task('tsc', 'tsc').
group(Tasks.TaskGroup.Build).
command().suppressTaskName(true).
presentation().
echo(Platform.isWindows ? false : true);
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
echoCommand: true,
windows: {
echoCommand: false
}
};
testConfiguration(external, builder);
});
test('tasks: global problemMatcher one', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
problemMatcher: '$msCompile'
};
testDefaultProblemMatcher(external, 1);
});
test('tasks: global problemMatcher two', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
problemMatcher: ['$eslint-compact', '$msCompile']
};
testDefaultProblemMatcher(external, 2);
});
test('tasks: task definition', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: build task', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName',
isBuildCommand: true
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').group(Tasks.TaskGroup.Build).command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: default build task', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'build'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('build', 'tsc').group(Tasks.TaskGroup.Build).command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: test task', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName',
isTestCommand: true
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').group(Tasks.TaskGroup.Test).command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: default test task', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'test'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('test', 'tsc').group(Tasks.TaskGroup.Test).command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: task with values', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'test',
showOutput: 'never',
echoCommand: true,
args: ['--p'],
isWatching: true
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('test', 'tsc').
group(Tasks.TaskGroup.Test).
isBackground(true).
promptOnClose(false).
command().args(['$name', '--p']).
presentation().
echo(true).reveal(Tasks.RevealKind.Never);
testConfiguration(external, builder);
});
test('tasks: task inherits global values', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
showOutput: 'never',
echoCommand: true,
tasks: [
{
taskName: 'test'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('test', 'tsc').
group(Tasks.TaskGroup.Test).
command().args(['$name']).presentation().
echo(true).reveal(Tasks.RevealKind.Never);
testConfiguration(external, builder);
});
	// 0.1.0 schema: a problem matcher that only supplies a pattern regexp; all
	// other matcher properties (owner, severity, applyTo, fileLocation) fall
	// back to their defaults, so the builder only records the pattern.
	test('tasks: problem matcher default', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						pattern: {
							regexp: 'abc'
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().pattern(/abc/);
		testConfiguration(external, builder);
	});
	// The '.*' pattern string must survive the string -> RegExp conversion
	// unchanged.
	test('tasks: problem matcher .* regular expression', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						pattern: {
							regexp: '.*'
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().pattern(/.*/);
		testConfiguration(external, builder);
	});
	// Explicit matcher properties are mapped onto their enum counterparts
	// (ApplyToKind, Severity, FileLocationKind); the expected configuration uses
	// an undefined file prefix for the 'absolute' file location.
	test('tasks: problem matcher owner, applyTo, severity and fileLocation', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						owner: 'myOwner',
						applyTo: 'closedDocuments',
						severity: 'warning',
						fileLocation: 'absolute',
						pattern: {
							regexp: 'abc'
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().
			owner('myOwner').
			applyTo(ApplyToKind.closedDocuments).
			severity(Severity.Warning).
			fileLocation(FileLocationKind.Absolute).
			filePrefix(undefined!).
			pattern(/abc/);
		testConfiguration(external, builder);
	});
	// A ['relative', <path>] fileLocation tuple splits into the Relative kind
	// plus the path as the file prefix.
	test('tasks: problem matcher fileLocation and filePrefix', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						fileLocation: ['relative', 'myPath'],
						pattern: {
							regexp: 'abc'
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().
			fileLocation(FileLocationKind.Relative).
			filePrefix('myPath').
			pattern(/abc/);
		testConfiguration(external, builder);
	});
	// Location-style problem pattern: numeric capture-group indices for file,
	// message, location, severity and code are passed straight through to the
	// expected configuration.
	test('tasks: problem pattern location', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						pattern: {
							regexp: 'abc',
							file: 10,
							message: 11,
							location: 12,
							severity: 13,
							code: 14
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().
			pattern(/abc/).file(10).message(11).location(12).severity(13).code(14);
		testConfiguration(external, builder);
	});
	// Line/column-style pattern: the external 'column'/'endColumn' indices map
	// onto the builder's character/endCharacter properties.
	test('tasks: problem pattern line & column', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			command: 'tsc',
			tasks: [
				{
					taskName: 'taskName',
					problemMatcher: {
						pattern: {
							regexp: 'abc',
							file: 10,
							message: 11,
							line: 12,
							column: 13,
							endLine: 14,
							endColumn: 15,
							severity: 16,
							code: 17
						}
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskName', 'tsc').
			command().args(['$name']).parent.
			problemMatcher().
			pattern(/abc/).file(10).message(11).
			line(12).character(13).endLine(14).endCharacter(15).
			severity(16).code(17);
		testConfiguration(external, builder);
	});
test('tasks: prompt on close default', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
promptOnClose(true).
command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: prompt on close watching', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName',
isWatching: true
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
isBackground(true).promptOnClose(false).
command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: prompt on close set', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskName',
promptOnClose: false
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
promptOnClose(false).
command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: task selector set', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
taskSelector: '/t:',
tasks: [
{
taskName: 'taskName',
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
command().
taskSelector('/t:').
args(['/t:taskName']);
testConfiguration(external, builder);
});
test('tasks: suppress task name set', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
suppressTaskName: false,
tasks: [
{
taskName: 'taskName',
suppressTaskName: true
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
command().suppressTaskName(true);
testConfiguration(external, builder);
});
test('tasks: suppress task name inherit', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
suppressTaskName: true,
tasks: [
{
taskName: 'taskName'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskName', 'tsc').
command().suppressTaskName(true);
testConfiguration(external, builder);
});
test('tasks: two tasks', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskNameOne'
},
{
taskName: 'taskNameTwo'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').
command().args(['$name']);
builder.task('taskNameTwo', 'tsc').
command().args(['$name']);
testConfiguration(external, builder);
});
test('tasks: with command', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
tasks: [
{
taskName: 'taskNameOne',
command: 'tsc'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').command().suppressTaskName(true);
testConfiguration(external, builder);
});
test('tasks: two tasks with command', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
tasks: [
{
taskName: 'taskNameOne',
command: 'tsc'
},
{
taskName: 'taskNameTwo',
command: 'dir'
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').command().suppressTaskName(true);
builder.task('taskNameTwo', 'dir').command().suppressTaskName(true);
testConfiguration(external, builder);
});
	// A task-local command together with shell flag, args and options (cwd/env)
	// must all be carried over to the resolved task; the task name is suppressed
	// because the command is task-local.
	test('tasks: with command and args', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			tasks: [
				{
					taskName: 'taskNameOne',
					command: 'tsc',
					isShellCommand: true,
					args: ['arg'],
					options: {
						cwd: 'cwd',
						env: {
							env: 'env'
						}
					}
				} as CustomTask
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskNameOne', 'tsc').command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).args(['arg']).options({ cwd: 'cwd', env: { env: 'env' } });
		testConfiguration(external, builder);
	});
	// The windows section overrides the task command on Windows only, so the
	// expected command name depends on the platform the test runs on.
	test('tasks: with command os specific', () => {
		let name: string = Platform.isWindows ? 'tsc.win' : 'tsc';
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			tasks: [
				{
					taskName: 'taskNameOne',
					command: 'tsc',
					windows: {
						command: 'tsc.win'
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('taskNameOne', name).command().suppressTaskName(true);
		testConfiguration(external, builder);
	});
	// In the 0.1.0 schema platform-specific args are appended after the shared
	// ones (arg2 after arg1), and only on the matching platform.
	test('tasks: with Windows specific args', () => {
		let args: string[] = Platform.isWindows ? ['arg1', 'arg2'] : ['arg1'];
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			tasks: [
				{
					taskName: 'tsc',
					command: 'tsc',
					args: ['arg1'],
					windows: {
						args: ['arg2']
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('tsc', 'tsc').command().suppressTaskName(true).args(args)
		testConfiguration(external, builder);
	});
	// Same appending behaviour for a linux-specific args section.
	test('tasks: with Linux specific args', () => {
		let args: string[] = Platform.isLinux ? ['arg1', 'arg2'] : ['arg1'];
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			tasks: [
				{
					taskName: 'tsc',
					command: 'tsc',
					args: ['arg1'],
					linux: {
						args: ['arg2']
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('tsc', 'tsc').command().suppressTaskName(true).args(args);
		testConfiguration(external, builder);
	});
test('tasks: global command and task command properties', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
tasks: [
{
taskName: 'taskNameOne',
isShellCommand: true,
} as CustomTask
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').command().runtime(Tasks.RuntimeType.Shell).args(['$name']);
testConfiguration(external, builder);
});
test('tasks: global and tasks args', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
args: ['global'],
tasks: [
{
taskName: 'taskNameOne',
args: ['local']
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').command().args(['global', '$name', 'local']);
testConfiguration(external, builder);
});
test('tasks: global and tasks args with task selector', () => {
let external: ExternalTaskRunnerConfiguration = {
version: '0.1.0',
command: 'tsc',
args: ['global'],
taskSelector: '/t:',
tasks: [
{
taskName: 'taskNameOne',
args: ['local']
}
]
};
let builder = new ConfiguationBuilder();
builder.task('taskNameOne', 'tsc').command().taskSelector('/t:').args(['global', '/t:taskNameOne', 'local']);
testConfiguration(external, builder);
});
});
// Tests for the 2.0.0 tasks schema (label / type / group based configuration).
suite('Tasks version 2.0.0', () => {
	// Shell task in the build group: shell tasks echo the command line and the
	// task name is never appended to the arguments.
	test.skip('Build workspace task', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			tasks: [
				{
					taskName: 'dir',
					command: 'dir',
					type: 'shell',
					group: 'build'
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('dir', 'dir').
			group(Tasks.TaskGroup.Build).
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	// group: 'none' at the global level results in a task without a group.
	test('Global group none', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			command: 'dir',
			type: 'shell',
			group: 'none'
		};
		let builder = new ConfiguationBuilder();
		builder.task('dir', 'dir').
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	// group: 'build' at the global level assigns the Build group.
	test.skip('Global group build', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			command: 'dir',
			type: 'shell',
			group: 'build'
		};
		let builder = new ConfiguationBuilder();
		builder.task('dir', 'dir').
			group(Tasks.TaskGroup.Build).
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	// The object form { kind, isDefault } marks the task as the default build task.
	test.skip('Global group default build', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			command: 'dir',
			type: 'shell',
			group: { kind: 'build', isDefault: true }
		};
		let builder = new ConfiguationBuilder();
		// NOTE(review): this mutates the shared Tasks.TaskGroup.Build object, so
		// isDefault = true can leak into other tests — consider using a copy.
		let taskGroup = Tasks.TaskGroup.Build;
		taskGroup.isDefault = true;
		builder.task('dir', 'dir').
			group(taskGroup).
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	// Same group handling as above, but specified per task instead of globally.
	test('Local group none', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			tasks: [
				{
					taskName: 'dir',
					command: 'dir',
					type: 'shell',
					group: 'none'
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('dir', 'dir').
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	test.skip('Local group build', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			tasks: [
				{
					taskName: 'dir',
					command: 'dir',
					type: 'shell',
					group: 'build'
				}
			]
		};
		let builder = new ConfiguationBuilder();
		builder.task('dir', 'dir').
			group(Tasks.TaskGroup.Build).
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	test.skip('Local group default build', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			tasks: [
				{
					taskName: 'dir',
					command: 'dir',
					type: 'shell',
					group: { kind: 'build', isDefault: true }
				}
			]
		};
		let builder = new ConfiguationBuilder();
		// NOTE(review): same shared Tasks.TaskGroup.Build mutation as in
		// 'Global group default build' in this suite.
		let taskGroup = Tasks.TaskGroup.Build;
		taskGroup.isDefault = true;
		builder.task('dir', 'dir').
			group(taskGroup).
			command().suppressTaskName(true).
			runtime(Tasks.RuntimeType.Shell).
			presentation().echo(true);
		testConfiguration(external, builder);
	});
	// Platform sections (windows/linux/osx) replace the shared args entirely in
	// the expected output; only the branch matching the current platform is
	// verified.
	test('Arg overwrite', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '2.0.0',
			tasks: [
				{
					label: 'echo',
					type: 'shell',
					command: 'echo',
					args: [
						'global'
					],
					windows: {
						args: [
							'windows'
						]
					},
					linux: {
						args: [
							'linux'
						]
					},
					osx: {
						args: [
							'osx'
						]
					}
				}
			]
		};
		let builder = new ConfiguationBuilder();
		if (Platform.isWindows) {
			builder.task('echo', 'echo').
				command().suppressTaskName(true).args(['windows']).
				runtime(Tasks.RuntimeType.Shell).
				presentation().echo(true);
			testConfiguration(external, builder);
		} else if (Platform.isLinux) {
			builder.task('echo', 'echo').
				command().suppressTaskName(true).args(['linux']).
				runtime(Tasks.RuntimeType.Shell).
				presentation().echo(true);
			testConfiguration(external, builder);
		} else if (Platform.isMacintosh) {
			builder.task('echo', 'echo').
				command().suppressTaskName(true).args(['osx']).
				runtime(Tasks.RuntimeType.Shell).
				presentation().echo(true);
			testConfiguration(external, builder);
		}
	});
});
suite('Bugs / regression tests', () => {
	// Bug 19548: platform-scoped (windows / osx) sections define their own
	// command, options and tasks; the test asserts the branch for the current
	// platform. Skipped on Linux since the configuration has no linux section.
	(Platform.isLinux ? test.skip : test)('Bug 19548', () => {
		let external: ExternalTaskRunnerConfiguration = {
			version: '0.1.0',
			windows: {
				command: 'powershell',
				options: {
					cwd: '${workspaceFolder}'
				},
				tasks: [
					{
						taskName: 'composeForDebug',
						suppressTaskName: true,
						args: [
							'-ExecutionPolicy',
							'RemoteSigned',
							'.\\dockerTask.ps1',
							'-ComposeForDebug',
							'-Environment',
							'debug'
						],
						isBuildCommand: false,
						showOutput: 'always',
						echoCommand: true
					} as CustomTask
				]
			},
			osx: {
				command: '/bin/bash',
				options: {
					cwd: '${workspaceFolder}'
				},
				tasks: [
					{
						taskName: 'composeForDebug',
						suppressTaskName: true,
						args: [
							'-c',
							'./dockerTask.sh composeForDebug debug'
						],
						isBuildCommand: false,
						showOutput: 'always'
					} as CustomTask
				]
			}
		};
		let builder = new ConfiguationBuilder();
		if (Platform.isWindows) {
			builder.task('composeForDebug', 'powershell').
				command().suppressTaskName(true).
				args(['-ExecutionPolicy', 'RemoteSigned', '.\\dockerTask.ps1', '-ComposeForDebug', '-Environment', 'debug']).
				options({ cwd: '${workspaceFolder}' }).
				presentation().echo(true).reveal(Tasks.RevealKind.Always);
			testConfiguration(external, builder);
		} else if (Platform.isMacintosh) {
			builder.task('composeForDebug', '/bin/bash').
				command().suppressTaskName(true).
				args(['-c', './dockerTask.sh composeForDebug debug']).
				options({ cwd: '${workspaceFolder}' }).
				presentation().reveal(Tasks.RevealKind.Always);
			testConfiguration(external, builder);
		}
	});
	// Bug 28489: an empty global command (with shell mode and an empty arg) must
	// not prevent the task-local command 'bash build.sh' from being picked up.
	test('Bug 28489', () => {
		let external = {
			version: '0.1.0',
			command: '',
			isShellCommand: true,
			args: [''],
			showOutput: 'always',
			'tasks': [
				{
					taskName: 'build',
					command: 'bash',
					args: [
						'build.sh'
					]
				}
			]
		};
		let builder = new ConfiguationBuilder();
		// NOTE(review): the expected Build group presumably comes from the task
		// name 'build' being recognized as a build task — confirm against the
		// configuration parser.
		builder.task('build', 'bash').
			group(Tasks.TaskGroup.Build).
			command().suppressTaskName(true).
			args(['build.sh']).
			runtime(Tasks.RuntimeType.Shell);
		testConfiguration(external, builder);
	});
});
<|file_name|>line-weight-rounded.js<|end_file_name|><|fim▁begin|>import { h } from 'omi';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(h("path", {<|fim▁hole|><|fim▁end|> | d: "M20 15H4c-.55 0-1 .45-1 1s.45 1 1 1h16c.55 0 1-.45 1-1s-.45-1-1-1zm0-5H4c-.55 0-1 .45-1 1v1c0 .55.45 1 1 1h16c.55 0 1-.45 1-1v-1c0-.55-.45-1-1-1zm0-6H4c-.55 0-1 .45-1 1v2c0 .55.45 1 1 1h16c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zm.5 15h-17c-.28 0-.5.22-.5.5s.22.5.5.5h17c.28 0 .5-.22.5-.5s-.22-.5-.5-.5z"
}), 'LineWeightRounded'); |
<|file_name|>polyfills.ts<|end_file_name|><|fim▁begin|>/**
* This file includes polyfills needed by Angular and is loaded before the app.
* You can add your own extra polyfills to this file.
*
* This file is divided into 2 sections:
* 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers.
* 2. Application imports. Files imported after ZoneJS that should be loaded before your main
* file.
*
* The current setup is for so-called "evergreen" browsers; the last versions of browsers that
* automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera),
* Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile.
*
* Learn more in https://angular.io/docs/ts/latest/guide/browser-support.html
*/
/***************************************************************************************************
* BROWSER POLYFILLS
*/
/**
* By default, zone.js will patch all possible macroTask and DomEvents
* user can disable parts of macroTask/DomEvents patch by setting following flags
*/
// (window as any).__Zone_disable_requestAnimationFrame = true; // disable patch requestAnimationFrame
// (window as any).__Zone_disable_on_property = true; // disable patch onProperty such as onclick
// (window as any).__zone_symbol__BLACK_LISTED_EVENTS = ['scroll', 'mousemove']; // disable patch specified eventNames<|fim▁hole|>* in IE/Edge developer tools, the addEventListener will also be wrapped by zone.js
* with the following flag, it will bypass `zone.js` patch for IE/Edge
*/
// (window as any).__Zone_enable_cross_context_check = true;
/***************************************************************************************************
* Zone JS is required by default for Angular itself.
*/
import 'zone.js'; // Included with Angular CLI.
/***************************************************************************************************
* APPLICATION IMPORTS
*/
import '@angular/localize/init';<|fim▁end|> |
/* |
<|file_name|>network_management_client.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .operations.application_gateways_operations import ApplicationGatewaysOperations
from .operations.available_endpoint_services_operations import AvailableEndpointServicesOperations
from .operations.express_route_circuit_authorizations_operations import ExpressRouteCircuitAuthorizationsOperations
from .operations.express_route_circuit_peerings_operations import ExpressRouteCircuitPeeringsOperations
from .operations.express_route_circuits_operations import ExpressRouteCircuitsOperations
from .operations.express_route_service_providers_operations import ExpressRouteServiceProvidersOperations
from .operations.load_balancers_operations import LoadBalancersOperations
from .operations.load_balancer_backend_address_pools_operations import LoadBalancerBackendAddressPoolsOperations
from .operations.load_balancer_frontend_ip_configurations_operations import LoadBalancerFrontendIPConfigurationsOperations
from .operations.inbound_nat_rules_operations import InboundNatRulesOperations
from .operations.load_balancer_load_balancing_rules_operations import LoadBalancerLoadBalancingRulesOperations
from .operations.load_balancer_network_interfaces_operations import LoadBalancerNetworkInterfacesOperations
from .operations.load_balancer_probes_operations import LoadBalancerProbesOperations
from .operations.network_interfaces_operations import NetworkInterfacesOperations
from .operations.network_interface_ip_configurations_operations import NetworkInterfaceIPConfigurationsOperations
from .operations.network_interface_load_balancers_operations import NetworkInterfaceLoadBalancersOperations
from .operations.network_security_groups_operations import NetworkSecurityGroupsOperations
from .operations.security_rules_operations import SecurityRulesOperations
from .operations.default_security_rules_operations import DefaultSecurityRulesOperations
from .operations.network_watchers_operations import NetworkWatchersOperations
from .operations.packet_captures_operations import PacketCapturesOperations
from .operations.public_ip_addresses_operations import PublicIPAddressesOperations
from .operations.route_filters_operations import RouteFiltersOperations
from .operations.route_filter_rules_operations import RouteFilterRulesOperations
from .operations.route_tables_operations import RouteTablesOperations
from .operations.routes_operations import RoutesOperations
from .operations.bgp_service_communities_operations import BgpServiceCommunitiesOperations
from .operations.usages_operations import UsagesOperations
from .operations.virtual_networks_operations import VirtualNetworksOperations
from .operations.subnets_operations import SubnetsOperations
from .operations.virtual_network_peerings_operations import VirtualNetworkPeeringsOperations
from .operations.virtual_network_gateways_operations import VirtualNetworkGatewaysOperations
from .operations.virtual_network_gateway_connections_operations import VirtualNetworkGatewayConnectionsOperations
from .operations.local_network_gateways_operations import LocalNetworkGatewaysOperations
from . import models
class NetworkManagementClientConfiguration(AzureConfiguration):
"""Configuration for NetworkManagementClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: The subscription credentials which uniquely
identify the Microsoft Azure subscription. The subscription ID forms part
of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
if not base_url:
base_url = 'https://management.azure.com'
super(NetworkManagementClientConfiguration, self).__init__(base_url)
self.add_user_agent('azure-mgmt-network/{}'.format(VERSION))
self.add_user_agent('Azure-SDK-For-Python')
self.credentials = credentials
self.subscription_id = subscription_id
class NetworkManagementClient(object):
"""Network Client
:ivar config: Configuration for client.
:vartype config: NetworkManagementClientConfiguration
:ivar application_gateways: ApplicationGateways operations
:vartype application_gateways: azure.mgmt.network.v2017_06_01.operations.ApplicationGatewaysOperations
:ivar available_endpoint_services: AvailableEndpointServices operations
:vartype available_endpoint_services: azure.mgmt.network.v2017_06_01.operations.AvailableEndpointServicesOperations
:ivar express_route_circuit_authorizations: ExpressRouteCircuitAuthorizations operations
:vartype express_route_circuit_authorizations: azure.mgmt.network.v2017_06_01.operations.ExpressRouteCircuitAuthorizationsOperations
:ivar express_route_circuit_peerings: ExpressRouteCircuitPeerings operations
:vartype express_route_circuit_peerings: azure.mgmt.network.v2017_06_01.operations.ExpressRouteCircuitPeeringsOperations
:ivar express_route_circuits: ExpressRouteCircuits operations
:vartype express_route_circuits: azure.mgmt.network.v2017_06_01.operations.ExpressRouteCircuitsOperations
:ivar express_route_service_providers: ExpressRouteServiceProviders operations
:vartype express_route_service_providers: azure.mgmt.network.v2017_06_01.operations.ExpressRouteServiceProvidersOperations
:ivar load_balancers: LoadBalancers operations
:vartype load_balancers: azure.mgmt.network.v2017_06_01.operations.LoadBalancersOperations
:ivar load_balancer_backend_address_pools: LoadBalancerBackendAddressPools operations
:vartype load_balancer_backend_address_pools: azure.mgmt.network.v2017_06_01.operations.LoadBalancerBackendAddressPoolsOperations
:ivar load_balancer_frontend_ip_configurations: LoadBalancerFrontendIPConfigurations operations
:vartype load_balancer_frontend_ip_configurations: azure.mgmt.network.v2017_06_01.operations.LoadBalancerFrontendIPConfigurationsOperations
:ivar inbound_nat_rules: InboundNatRules operations
:vartype inbound_nat_rules: azure.mgmt.network.v2017_06_01.operations.InboundNatRulesOperations
:ivar load_balancer_load_balancing_rules: LoadBalancerLoadBalancingRules operations
:vartype load_balancer_load_balancing_rules: azure.mgmt.network.v2017_06_01.operations.LoadBalancerLoadBalancingRulesOperations
:ivar load_balancer_network_interfaces: LoadBalancerNetworkInterfaces operations
:vartype load_balancer_network_interfaces: azure.mgmt.network.v2017_06_01.operations.LoadBalancerNetworkInterfacesOperations
:ivar load_balancer_probes: LoadBalancerProbes operations
:vartype load_balancer_probes: azure.mgmt.network.v2017_06_01.operations.LoadBalancerProbesOperations
:ivar network_interfaces: NetworkInterfaces operations
:vartype network_interfaces: azure.mgmt.network.v2017_06_01.operations.NetworkInterfacesOperations
:ivar network_interface_ip_configurations: NetworkInterfaceIPConfigurations operations
:vartype network_interface_ip_configurations: azure.mgmt.network.v2017_06_01.operations.NetworkInterfaceIPConfigurationsOperations
:ivar network_interface_load_balancers: NetworkInterfaceLoadBalancers operations
:vartype network_interface_load_balancers: azure.mgmt.network.v2017_06_01.operations.NetworkInterfaceLoadBalancersOperations
:ivar network_security_groups: NetworkSecurityGroups operations
:vartype network_security_groups: azure.mgmt.network.v2017_06_01.operations.NetworkSecurityGroupsOperations
:ivar security_rules: SecurityRules operations
:vartype security_rules: azure.mgmt.network.v2017_06_01.operations.SecurityRulesOperations
:ivar default_security_rules: DefaultSecurityRules operations
:vartype default_security_rules: azure.mgmt.network.v2017_06_01.operations.DefaultSecurityRulesOperations
:ivar network_watchers: NetworkWatchers operations
:vartype network_watchers: azure.mgmt.network.v2017_06_01.operations.NetworkWatchersOperations
:ivar packet_captures: PacketCaptures operations
:vartype packet_captures: azure.mgmt.network.v2017_06_01.operations.PacketCapturesOperations
:ivar public_ip_addresses: PublicIPAddresses operations
:vartype public_ip_addresses: azure.mgmt.network.v2017_06_01.operations.PublicIPAddressesOperations
:ivar route_filters: RouteFilters operations
:vartype route_filters: azure.mgmt.network.v2017_06_01.operations.RouteFiltersOperations
:ivar route_filter_rules: RouteFilterRules operations
:vartype route_filter_rules: azure.mgmt.network.v2017_06_01.operations.RouteFilterRulesOperations
:ivar route_tables: RouteTables operations
:vartype route_tables: azure.mgmt.network.v2017_06_01.operations.RouteTablesOperations
:ivar routes: Routes operations
:vartype routes: azure.mgmt.network.v2017_06_01.operations.RoutesOperations
:ivar bgp_service_communities: BgpServiceCommunities operations
:vartype bgp_service_communities: azure.mgmt.network.v2017_06_01.operations.BgpServiceCommunitiesOperations
:ivar usages: Usages operations
:vartype usages: azure.mgmt.network.v2017_06_01.operations.UsagesOperations
:ivar virtual_networks: VirtualNetworks operations
:vartype virtual_networks: azure.mgmt.network.v2017_06_01.operations.VirtualNetworksOperations
:ivar subnets: Subnets operations
:vartype subnets: azure.mgmt.network.v2017_06_01.operations.SubnetsOperations
:ivar virtual_network_peerings: VirtualNetworkPeerings operations
:vartype virtual_network_peerings: azure.mgmt.network.v2017_06_01.operations.VirtualNetworkPeeringsOperations
:ivar virtual_network_gateways: VirtualNetworkGateways operations
:vartype virtual_network_gateways: azure.mgmt.network.v2017_06_01.operations.VirtualNetworkGatewaysOperations
:ivar virtual_network_gateway_connections: VirtualNetworkGatewayConnections operations
:vartype virtual_network_gateway_connections: azure.mgmt.network.v2017_06_01.operations.VirtualNetworkGatewayConnectionsOperations
:ivar local_network_gateways: LocalNetworkGateways operations
:vartype local_network_gateways: azure.mgmt.network.v2017_06_01.operations.LocalNetworkGatewaysOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: The subscription credentials which uniquely
identify the Microsoft Azure subscription. The subscription ID forms part
of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
self.config = NetworkManagementClientConfiguration(credentials, subscription_id, base_url)
self._client = ServiceClient(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.application_gateways = ApplicationGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
self.available_endpoint_services = AvailableEndpointServicesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuit_authorizations = ExpressRouteCircuitAuthorizationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuit_peerings = ExpressRouteCircuitPeeringsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuits = ExpressRouteCircuitsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_service_providers = ExpressRouteServiceProvidersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancers = LoadBalancersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_backend_address_pools = LoadBalancerBackendAddressPoolsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_frontend_ip_configurations = LoadBalancerFrontendIPConfigurationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.inbound_nat_rules = InboundNatRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_load_balancing_rules = LoadBalancerLoadBalancingRulesOperations(<|fim▁hole|> self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_probes = LoadBalancerProbesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interfaces = NetworkInterfacesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interface_ip_configurations = NetworkInterfaceIPConfigurationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interface_load_balancers = NetworkInterfaceLoadBalancersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_security_groups = NetworkSecurityGroupsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.security_rules = SecurityRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.default_security_rules = DefaultSecurityRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_watchers = NetworkWatchersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.packet_captures = PacketCapturesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.public_ip_addresses = PublicIPAddressesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_filters = RouteFiltersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_filter_rules = RouteFilterRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_tables = RouteTablesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.routes = RoutesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.bgp_service_communities = BgpServiceCommunitiesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.usages = UsagesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_networks = VirtualNetworksOperations(
self._client, self.config, self._serialize, self._deserialize)
self.subnets = SubnetsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_peerings = VirtualNetworkPeeringsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_gateways = VirtualNetworkGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_gateway_connections = VirtualNetworkGatewayConnectionsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.local_network_gateways = LocalNetworkGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
def check_dns_name_availability(
        self, location, domain_name_label=None, custom_headers=None, raw=False, **operation_config):
    """Checks whether a domain name in the cloudapp.net zone is available for
    use.

    :param location: The location of the domain name.
    :type location: str
    :param domain_name_label: The domain name to be verified. It must
     conform to the following regular expression:
     ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
    :type domain_name_label: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: DnsNameAvailabilityResult or ClientRawResponse if raw=true
    :rtype:
     ~azure.mgmt.network.v2017_06_01.models.DnsNameAvailabilityResult or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    api_version = "2017-06-01"

    # Construct URL
    url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'
    path_format_arguments = {
        'location': self._serialize.url("location", location, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    if domain_name_label is not None:
        query_parameters['domainNameLabel'] = self._serialize.query("domain_name_label", domain_name_label, 'str')
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # correlation id so the service can trace this request
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as a CloudError carrying the
    # service-side request id for support diagnostics.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('DnsNameAvailabilityResult', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
self.load_balancer_network_interfaces = LoadBalancerNetworkInterfacesOperations( |
<|file_name|>GetUnitDetailAction.java<|end_file_name|><|fim▁begin|>package br.com.cmabreu.action;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.StrutsStatics;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.InterceptorRef;
import org.apache.struts2.convention.annotation.ParentPackage;
import org.apache.struts2.convention.annotation.Result;
<|fim▁hole|> interceptorRefs= { @InterceptorRef("seguranca") }
)
@ParentPackage("default")
public class GetUnitDetailAction {
public String execute(){
String resposta = "";
try {
HttpServletResponse response = (HttpServletResponse)ActionContext.getContext().get(StrutsStatics.HTTP_RESPONSE);
response.setCharacterEncoding("UTF-8");
response.getWriter().write(resposta);
} catch (IOException ex) {
System.out.println("[GetUnitDetailAction] Erro respondendo AJAX.");
}
return "ok";
}
}<|fim▁end|> | import com.opensymphony.xwork2.ActionContext;
@Action(value="getUnitDetail", results= {
@Result(name="ok", type="httpheader", params={"status", "200"}) },
|
// Formatting fixture for comment placement around const/static initializers.
const A: usize =
    // Some constant
    2;

const B: usize =
    /* constant */
    3;

const C: usize = /* foo */ 5;

const D: usize = // baz
    /* Some constant */
    /* ba */
    {
        3
        // foo
    };

const E: usize = /* foo */ 5;

const F: usize = { 7 };

const G: usize =
    /* foooooooooooooooooooooooooooooooooooooooooooooooooooooooo0000000000000000xx00 */
    5;

const H: usize = /* asdfasdf */
    match G > 1 {
        true => 1,
        false => 3,
    };

pub static FOO_BAR: Vec<u8> = //f
    { vec![] };
/******************************************************************************\
| |
| package-version-file-types-view.js |
| |
|******************************************************************************|
| |
|        This defines a dialog that is used to select directories within      |
| package versions. |
| |
|******************************************************************************|
| Copyright (c) 2013 SWAMP - Software Assurance Marketplace |
\******************************************************************************/
define([
	'jquery',
	'underscore',
	'backbone',
	'marionette',
	'text!templates/packages/info/versions/info/source/dialogs/package-version-file-types.tpl',
	'scripts/registry',
	'scripts/views/dialogs/error-view',
	'scripts/views/files/file-types-list/file-types-list-view'
], function($, _, Backbone, Marionette, Template, Registry, ErrorView, FileTypesListView) {
	return Backbone.Marionette.LayoutView.extend({

		//
		// attributes
		//

		regions: {
			fileTypes: '#file-types'
		},

		events: {
			'click #ok': 'onClickOk',
			'keypress': 'onKeyPress'
		},

		//
		// rendering methods
		//

		template: function() {
			return _.template(Template, {
				title: this.options.title,
				packagePath: this.options.packagePath
			});
		},

		onRender: function() {
			this.showFileTypes();
		},

		showFileTypes: function() {

			// fetch package version file types
			//
			var self = this;
			this.model.fetchFileTypes({
				data: {
					'dirname': this.options.packagePath
				},

				// callbacks
				//
				success: function(data) {

					// convert the {extension: count} map returned by the
					// server into a collection of models for the list view
					//
					var collection = new Backbone.Collection();
					for (var key in data) {
						collection.add(new Backbone.Model({
							'extension': key,
							'count': data[key]
						}));
					}

					self.fileTypes.show(
						new FileTypesListView({
							collection: collection
						})
					);
				},

				error: function() {

					// show error dialog
					//
					Registry.application.modal.show(
						new ErrorView({
							message: "Could not fetch file types for this package version."
						})
					);
				}
			});
		},

		//
		// event handling methods
		//

		onClickOk: function() {

			// apply callback
			//
			if (this.options.accept) {
				this.options.accept();
			}
		},

		onKeyPress: function(event) {

			// respond to enter key press
			//
			if (event.keyCode === 13) {
				this.onClickOk();
				// NOTE(review): hide() is not defined on this view — presumably
				// provided by the enclosing modal wrapper; verify
				this.hide();
			}
		}
	});
});
// ----------> GENERATED FILE - DON'T TOUCH! <----------

// generator: ilarkesto.mda.legacy.generator.DaoGenerator

package scrum.server.admin;

import java.util.*;
import ilarkesto.persistence.*;
import ilarkesto.core.logging.Log;
import ilarkesto.base.*;
import ilarkesto.base.time.*;
import ilarkesto.auth.*;
import ilarkesto.fp.*;

public abstract class GUserDao
            extends ilarkesto.auth.AUserDao<User> {

    public final String getEntityName() {
        return User.TYPE;
    }

    public final Class getEntityClass() {
        return User.class;
    }

    public Set<User> getEntitiesVisibleForUser(final scrum.server.admin.User user) {
        return getEntities(new Predicate<User>() {
            public boolean test(User e) {
                return Auth.isVisible(e, user);
            }
        });
    }

    // --- clear caches ---
    public void clearCaches() {
        namesCache = null;
        usersByAdminCache.clear();
        usersByEmailVerifiedCache.clear();
        emailsCache = null;
        usersByCurrentProjectCache.clear();
        currentProjectsCache = null;
        usersByColorCache.clear();
        colorsCache = null;
        usersByLastLoginDateAndTimeCache.clear();
        lastLoginDateAndTimesCache = null;
        usersByRegistrationDateAndTimeCache.clear();
        registrationDateAndTimesCache = null;
        usersByDisabledCache.clear();
        usersByHideUserGuideBlogCache.clear();
        usersByHideUserGuideCalendarCache.clear();
        usersByHideUserGuideFilesCache.clear();
        usersByHideUserGuideForumCache.clear();
        usersByHideUserGuideImpedimentsCache.clear();
        usersByHideUserGuideIssuesCache.clear();
        usersByHideUserGuideJournalCache.clear();
        usersByHideUserGuideNextSprintCache.clear();
        usersByHideUserGuideProductBacklogCache.clear();
        usersByHideUserGuideCourtroomCache.clear();
        usersByHideUserGuideQualityBacklogCache.clear();
        usersByHideUserGuideReleasesCache.clear();
        usersByHideUserGuideRisksCache.clear();
        usersByHideUserGuideSprintBacklogCache.clear();
        usersByHideUserGuideWhiteboardCache.clear();
        loginTokensCache = null;
        openIdsCache = null;
    }

    @Override
    public void entityDeleted(EntityEvent event) {
        super.entityDeleted(event);
        if (event.getEntity() instanceof User) {
            clearCaches();
        }
    }

    @Override
    public void entitySaved(EntityEvent event) {
        super.entitySaved(event);
        if (event.getEntity() instanceof User) {
            clearCaches();
        }
    }

    // -----------------------------------------------------------
    // - name
    // -----------------------------------------------------------

    public final User getUserByName(java.lang.String name) {
        return getEntity(new IsName(name));
    }

    private Set<java.lang.String> namesCache;

    public final Set<java.lang.String> getNames() {
        if (namesCache == null) {
            namesCache = new HashSet<java.lang.String>();
            for (User e : getEntities()) {
                if (e.isNameSet()) namesCache.add(e.getName());
            }
        }
        return namesCache;
    }

    private static class IsName implements Predicate<User> {
        private java.lang.String value;
        public IsName(java.lang.String value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isName(value);
        }
    }

    // -----------------------------------------------------------
    // - admin
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByAdminCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean admin) {
                    return getEntities(new IsAdmin(admin));
                }
            });

    public final Set<User> getUsersByAdmin(boolean admin) {
        return usersByAdminCache.get(admin);
    }

    private static class IsAdmin implements Predicate<User> {
        private boolean value;
        public IsAdmin(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isAdmin();
        }
    }

    // -----------------------------------------------------------
    // - emailVerified
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByEmailVerifiedCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean emailVerified) {
                    return getEntities(new IsEmailVerified(emailVerified));
                }
            });

    public final Set<User> getUsersByEmailVerified(boolean emailVerified) {
        return usersByEmailVerifiedCache.get(emailVerified);
    }

    private static class IsEmailVerified implements Predicate<User> {
        private boolean value;
        public IsEmailVerified(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isEmailVerified();
        }
    }

    // -----------------------------------------------------------
    // - email
    // -----------------------------------------------------------

    public final User getUserByEmail(java.lang.String email) {
        return getEntity(new IsEmail(email));
    }

    private Set<java.lang.String> emailsCache;

    public final Set<java.lang.String> getEmails() {
        if (emailsCache == null) {
            emailsCache = new HashSet<java.lang.String>();
            for (User e : getEntities()) {
                if (e.isEmailSet()) emailsCache.add(e.getEmail());
            }
        }
        return emailsCache;
    }

    private static class IsEmail implements Predicate<User> {
        private java.lang.String value;
        public IsEmail(java.lang.String value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isEmail(value);
        }
    }

    // -----------------------------------------------------------
    // - currentProject
    // -----------------------------------------------------------

    private final Cache<scrum.server.project.Project,Set<User>> usersByCurrentProjectCache = new Cache<scrum.server.project.Project,Set<User>>(
            new Cache.Factory<scrum.server.project.Project,Set<User>>() {
                public Set<User> create(scrum.server.project.Project currentProject) {
                    return getEntities(new IsCurrentProject(currentProject));
                }
            });

    public final Set<User> getUsersByCurrentProject(scrum.server.project.Project currentProject) {
        return usersByCurrentProjectCache.get(currentProject);
    }

    private Set<scrum.server.project.Project> currentProjectsCache;

    public final Set<scrum.server.project.Project> getCurrentProjects() {
        if (currentProjectsCache == null) {
            currentProjectsCache = new HashSet<scrum.server.project.Project>();
            for (User e : getEntities()) {
                if (e.isCurrentProjectSet()) currentProjectsCache.add(e.getCurrentProject());
            }
        }
        return currentProjectsCache;
    }

    private static class IsCurrentProject implements Predicate<User> {
        private scrum.server.project.Project value;
        public IsCurrentProject(scrum.server.project.Project value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isCurrentProject(value);
        }
    }

    // -----------------------------------------------------------
    // - color
    // -----------------------------------------------------------

    private final Cache<java.lang.String,Set<User>> usersByColorCache = new Cache<java.lang.String,Set<User>>(
            new Cache.Factory<java.lang.String,Set<User>>() {
                public Set<User> create(java.lang.String color) {
                    return getEntities(new IsColor(color));
                }
            });

    public final Set<User> getUsersByColor(java.lang.String color) {
        return usersByColorCache.get(color);
    }

    private Set<java.lang.String> colorsCache;

    public final Set<java.lang.String> getColors() {
        if (colorsCache == null) {
            colorsCache = new HashSet<java.lang.String>();
            for (User e : getEntities()) {
                if (e.isColorSet()) colorsCache.add(e.getColor());
            }
        }
        return colorsCache;
    }

    private static class IsColor implements Predicate<User> {
        private java.lang.String value;
        public IsColor(java.lang.String value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isColor(value);
        }
    }

    // -----------------------------------------------------------
    // - lastLoginDateAndTime
    // -----------------------------------------------------------

    private final Cache<ilarkesto.base.time.DateAndTime,Set<User>> usersByLastLoginDateAndTimeCache = new Cache<ilarkesto.base.time.DateAndTime,Set<User>>(
            new Cache.Factory<ilarkesto.base.time.DateAndTime,Set<User>>() {
                public Set<User> create(ilarkesto.base.time.DateAndTime lastLoginDateAndTime) {
                    return getEntities(new IsLastLoginDateAndTime(lastLoginDateAndTime));
                }
            });

    public final Set<User> getUsersByLastLoginDateAndTime(ilarkesto.base.time.DateAndTime lastLoginDateAndTime) {
        return usersByLastLoginDateAndTimeCache.get(lastLoginDateAndTime);
    }

    private Set<ilarkesto.base.time.DateAndTime> lastLoginDateAndTimesCache;

    public final Set<ilarkesto.base.time.DateAndTime> getLastLoginDateAndTimes() {
        if (lastLoginDateAndTimesCache == null) {
            lastLoginDateAndTimesCache = new HashSet<ilarkesto.base.time.DateAndTime>();
            for (User e : getEntities()) {
                if (e.isLastLoginDateAndTimeSet()) lastLoginDateAndTimesCache.add(e.getLastLoginDateAndTime());
            }
        }
        return lastLoginDateAndTimesCache;
    }

    private static class IsLastLoginDateAndTime implements Predicate<User> {
        private ilarkesto.base.time.DateAndTime value;
        public IsLastLoginDateAndTime(ilarkesto.base.time.DateAndTime value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isLastLoginDateAndTime(value);
        }
    }

    // -----------------------------------------------------------
    // - registrationDateAndTime
    // -----------------------------------------------------------

    private final Cache<ilarkesto.base.time.DateAndTime,Set<User>> usersByRegistrationDateAndTimeCache = new Cache<ilarkesto.base.time.DateAndTime,Set<User>>(
            new Cache.Factory<ilarkesto.base.time.DateAndTime,Set<User>>() {
                public Set<User> create(ilarkesto.base.time.DateAndTime registrationDateAndTime) {
                    return getEntities(new IsRegistrationDateAndTime(registrationDateAndTime));
                }
            });

    public final Set<User> getUsersByRegistrationDateAndTime(ilarkesto.base.time.DateAndTime registrationDateAndTime) {
        return usersByRegistrationDateAndTimeCache.get(registrationDateAndTime);
    }

    private Set<ilarkesto.base.time.DateAndTime> registrationDateAndTimesCache;

    public final Set<ilarkesto.base.time.DateAndTime> getRegistrationDateAndTimes() {
        if (registrationDateAndTimesCache == null) {
            registrationDateAndTimesCache = new HashSet<ilarkesto.base.time.DateAndTime>();
            for (User e : getEntities()) {
                if (e.isRegistrationDateAndTimeSet()) registrationDateAndTimesCache.add(e.getRegistrationDateAndTime());
            }
        }
        return registrationDateAndTimesCache;
    }

    private static class IsRegistrationDateAndTime implements Predicate<User> {
        private ilarkesto.base.time.DateAndTime value;
        public IsRegistrationDateAndTime(ilarkesto.base.time.DateAndTime value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isRegistrationDateAndTime(value);
        }
    }

    // -----------------------------------------------------------
    // - disabled
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByDisabledCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean disabled) {
                    return getEntities(new IsDisabled(disabled));
                }
            });

    public final Set<User> getUsersByDisabled(boolean disabled) {
        return usersByDisabledCache.get(disabled);
    }

    private static class IsDisabled implements Predicate<User> {
        private boolean value;
        public IsDisabled(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isDisabled();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideBlog
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideBlogCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideBlog) {
                    return getEntities(new IsHideUserGuideBlog(hideUserGuideBlog));
                }
            });

    public final Set<User> getUsersByHideUserGuideBlog(boolean hideUserGuideBlog) {
        return usersByHideUserGuideBlogCache.get(hideUserGuideBlog);
    }

    private static class IsHideUserGuideBlog implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideBlog(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideBlog();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideCalendar
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideCalendarCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideCalendar) {
                    return getEntities(new IsHideUserGuideCalendar(hideUserGuideCalendar));
                }
            });

    public final Set<User> getUsersByHideUserGuideCalendar(boolean hideUserGuideCalendar) {
        return usersByHideUserGuideCalendarCache.get(hideUserGuideCalendar);
    }

    private static class IsHideUserGuideCalendar implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideCalendar(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideCalendar();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideFiles
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideFilesCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideFiles) {
                    return getEntities(new IsHideUserGuideFiles(hideUserGuideFiles));
                }
            });

    public final Set<User> getUsersByHideUserGuideFiles(boolean hideUserGuideFiles) {
        return usersByHideUserGuideFilesCache.get(hideUserGuideFiles);
    }

    private static class IsHideUserGuideFiles implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideFiles(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideFiles();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideForum
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideForumCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideForum) {
                    return getEntities(new IsHideUserGuideForum(hideUserGuideForum));
                }
            });

    public final Set<User> getUsersByHideUserGuideForum(boolean hideUserGuideForum) {
        return usersByHideUserGuideForumCache.get(hideUserGuideForum);
    }

    private static class IsHideUserGuideForum implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideForum(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideForum();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideImpediments
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideImpedimentsCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideImpediments) {
                    return getEntities(new IsHideUserGuideImpediments(hideUserGuideImpediments));
                }
            });

    public final Set<User> getUsersByHideUserGuideImpediments(boolean hideUserGuideImpediments) {
        return usersByHideUserGuideImpedimentsCache.get(hideUserGuideImpediments);
    }

    private static class IsHideUserGuideImpediments implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideImpediments(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideImpediments();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideIssues
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideIssuesCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideIssues) {
                    return getEntities(new IsHideUserGuideIssues(hideUserGuideIssues));
                }
            });

    public final Set<User> getUsersByHideUserGuideIssues(boolean hideUserGuideIssues) {
        return usersByHideUserGuideIssuesCache.get(hideUserGuideIssues);
    }

    private static class IsHideUserGuideIssues implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideIssues(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideIssues();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideJournal
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideJournalCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideJournal) {
                    return getEntities(new IsHideUserGuideJournal(hideUserGuideJournal));
                }
            });

    public final Set<User> getUsersByHideUserGuideJournal(boolean hideUserGuideJournal) {
        return usersByHideUserGuideJournalCache.get(hideUserGuideJournal);
    }

    private static class IsHideUserGuideJournal implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideJournal(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideJournal();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideNextSprint
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideNextSprintCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideNextSprint) {
                    return getEntities(new IsHideUserGuideNextSprint(hideUserGuideNextSprint));
                }
            });

    public final Set<User> getUsersByHideUserGuideNextSprint(boolean hideUserGuideNextSprint) {
        return usersByHideUserGuideNextSprintCache.get(hideUserGuideNextSprint);
    }

    private static class IsHideUserGuideNextSprint implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideNextSprint(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideNextSprint();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideProductBacklog
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideProductBacklogCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideProductBacklog) {
                    return getEntities(new IsHideUserGuideProductBacklog(hideUserGuideProductBacklog));
                }
            });

    public final Set<User> getUsersByHideUserGuideProductBacklog(boolean hideUserGuideProductBacklog) {
        return usersByHideUserGuideProductBacklogCache.get(hideUserGuideProductBacklog);
    }

    private static class IsHideUserGuideProductBacklog implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideProductBacklog(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideProductBacklog();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideCourtroom
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideCourtroomCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideCourtroom) {
                    return getEntities(new IsHideUserGuideCourtroom(hideUserGuideCourtroom));
                }
            });

    public final Set<User> getUsersByHideUserGuideCourtroom(boolean hideUserGuideCourtroom) {
        return usersByHideUserGuideCourtroomCache.get(hideUserGuideCourtroom);
    }

    private static class IsHideUserGuideCourtroom implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideCourtroom(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideCourtroom();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideQualityBacklog
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideQualityBacklogCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideQualityBacklog) {
                    return getEntities(new IsHideUserGuideQualityBacklog(hideUserGuideQualityBacklog));
                }
            });

    public final Set<User> getUsersByHideUserGuideQualityBacklog(boolean hideUserGuideQualityBacklog) {
        return usersByHideUserGuideQualityBacklogCache.get(hideUserGuideQualityBacklog);
    }

    private static class IsHideUserGuideQualityBacklog implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideQualityBacklog(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideQualityBacklog();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideReleases
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideReleasesCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideReleases) {
                    return getEntities(new IsHideUserGuideReleases(hideUserGuideReleases));
                }
            });

    public final Set<User> getUsersByHideUserGuideReleases(boolean hideUserGuideReleases) {
        return usersByHideUserGuideReleasesCache.get(hideUserGuideReleases);
    }

    private static class IsHideUserGuideReleases implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideReleases(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideReleases();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideRisks
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideRisksCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideRisks) {
                    return getEntities(new IsHideUserGuideRisks(hideUserGuideRisks));
                }
            });

    public final Set<User> getUsersByHideUserGuideRisks(boolean hideUserGuideRisks) {
        return usersByHideUserGuideRisksCache.get(hideUserGuideRisks);
    }

    private static class IsHideUserGuideRisks implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideRisks(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideRisks();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideSprintBacklog
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideSprintBacklogCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideSprintBacklog) {
                    return getEntities(new IsHideUserGuideSprintBacklog(hideUserGuideSprintBacklog));
                }
            });

    public final Set<User> getUsersByHideUserGuideSprintBacklog(boolean hideUserGuideSprintBacklog) {
        return usersByHideUserGuideSprintBacklogCache.get(hideUserGuideSprintBacklog);
    }

    private static class IsHideUserGuideSprintBacklog implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideSprintBacklog(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideSprintBacklog();
        }
    }

    // -----------------------------------------------------------
    // - hideUserGuideWhiteboard
    // -----------------------------------------------------------

    private final Cache<Boolean,Set<User>> usersByHideUserGuideWhiteboardCache = new Cache<Boolean,Set<User>>(
            new Cache.Factory<Boolean,Set<User>>() {
                public Set<User> create(Boolean hideUserGuideWhiteboard) {
                    return getEntities(new IsHideUserGuideWhiteboard(hideUserGuideWhiteboard));
                }
            });

    public final Set<User> getUsersByHideUserGuideWhiteboard(boolean hideUserGuideWhiteboard) {
        return usersByHideUserGuideWhiteboardCache.get(hideUserGuideWhiteboard);
    }

    private static class IsHideUserGuideWhiteboard implements Predicate<User> {
        private boolean value;
        public IsHideUserGuideWhiteboard(boolean value) {
            this.value = value;
        }
        public boolean test(User e) {
            return value == e.isHideUserGuideWhiteboard();
        }
    }

    // -----------------------------------------------------------
    // - loginToken
    // -----------------------------------------------------------

    public final User getUserByLoginToken(java.lang.String loginToken) {
        return getEntity(new IsLoginToken(loginToken));
    }

    private Set<java.lang.String> loginTokensCache;

    public final Set<java.lang.String> getLoginTokens() {
        if (loginTokensCache == null) {
            loginTokensCache = new HashSet<java.lang.String>();
            for (User e : getEntities()) {
                if (e.isLoginTokenSet()) loginTokensCache.add(e.getLoginToken());
            }
        }
        return loginTokensCache;
    }

    private static class IsLoginToken implements Predicate<User> {
        private java.lang.String value;
        public IsLoginToken(java.lang.String value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isLoginToken(value);
        }
    }

    // -----------------------------------------------------------
    // - openId
    // -----------------------------------------------------------

    public final User getUserByOpenId(java.lang.String openId) {
        return getEntity(new IsOpenId(openId));
    }

    private Set<java.lang.String> openIdsCache;

    public final Set<java.lang.String> getOpenIds() {
        if (openIdsCache == null) {
            openIdsCache = new HashSet<java.lang.String>();
            for (User e : getEntities()) {
                if (e.isOpenIdSet()) openIdsCache.add(e.getOpenId());
            }
        }
        return openIdsCache;
    }

    private static class IsOpenId implements Predicate<User> {
        private java.lang.String value;
        public IsOpenId(java.lang.String value) {
            this.value = value;
        }
        public boolean test(User e) {
            return e.isOpenId(value);
        }
    }

    // --- valueObject classes ---
    @Override
    protected Set<Class> getValueObjectClasses() {
        Set<Class> ret = new HashSet<Class>(super.getValueObjectClasses());
        return ret;
    }

    @Override
    public Map<String, Class> getAliases() {
        Map<String, Class> aliases = new HashMap<String, Class>(super.getAliases());
        return aliases;
    }

    // --- dependencies ---

    scrum.server.project.ProjectDao projectDao;

    public void setProjectDao(scrum.server.project.ProjectDao projectDao) {
        this.projectDao = projectDao;
    }

}
<|file_name|>once-cant-call-twice-on-stack.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Testing guarantees provided by once functions.
// This program would segfault if it were legal.
// compile-flags:-Z once-fns
extern mod extra;
use extra::arc;
use std::util;
fn foo(blk: &once fn()) {
blk();
blk(); //~ ERROR use of moved value
}
fn main() {
let x = arc::ARC(true);<|fim▁hole|> do foo {
assert!(*x.get());
util::ignore(x);
}
}<|fim▁end|> | |
<|file_name|>script.py<|end_file_name|><|fim▁begin|>def sum(*args):
total = 0
for number in args:
if isinstance(number, int):
total += number
return total<|fim▁hole|><|fim▁end|> | print(sum(1,5)) |
<|file_name|>big-impl-rfc.rs<|end_file_name|><|fim▁begin|>// rustfmt-fn_args_layout: Block
// rustfmt-fn_call_style: Block
// rustfmt-generics_indent: Block
// rustfmt-where_style: Rfc
// #1357
impl<'a, Select, From, Distinct, Where, Order, Limit, Offset, Groupby, DB> InternalBoxedDsl<'a, DB>
for SelectStatement<Select, From, Distinct, Where, Order, Limit, Offset, GroupBy>
where
DB: Backend,
Select: QueryFragment<DB> + SelectableExpression<From> + 'a,
Distinct: QueryFragment<DB> + 'a,
Where: Into<Option<Box<QueryFragment<DB> + 'a>>>,
Order: QueryFragment<DB> + 'a,
Limit: QueryFragment<DB> + 'a,
Offset: QueryFragment<DB> + 'a,
{
type Output = BoxedSelectStatement<'a, Select::SqlTypeForSelect, From, DB>;
fn internal_into_boxed(self) -> Self::Output {
BoxedSelectStatement::new(
Box::new(self.select),
self.from,
Box::new(self.distinct),
self.where_clause.into(),
Box::new(self.order),
Box::new(self.limit),
Box::new(self.offset),
)
}
}
// #1369
impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo<|fim▁hole|> for Bar {
fn foo() {}
}
impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
for Bar {
fn foo() {}
}
impl<
ExcessivelyLongGenericName,
ExcessivelyLongGenericName,
AnotherExcessivelyLongGenericName,
> Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
for Bar {
fn foo() {}
}
impl<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName> Foo
for Bar<
ExcessivelyLongGenericName,
ExcessivelyLongGenericName,
AnotherExcessivelyLongGenericName,
> {
fn foo() {}
}
impl Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
for Bar<
ExcessivelyLongGenericName,
ExcessivelyLongGenericName,
AnotherExcessivelyLongGenericName,
> {
fn foo() {}
}
impl<
ExcessivelyLongGenericName,
ExcessivelyLongGenericName,
AnotherExcessivelyLongGenericName,
> Foo<ExcessivelyLongGenericName, ExcessivelyLongGenericName, AnotherExcessivelyLongGenericName>
for Bar<
ExcessivelyLongGenericName,
ExcessivelyLongGenericName,
AnotherExcessivelyLongGenericName,
> {
fn foo() {}
}
// #1689
impl<M, S, F, X> SubSelectDirect<M, S, F, X>
where
M: select::Selector,
S: event::Stream,
F: for<'t> FnMut(transform::Api<'t, Stream<ContentStream<S>>>)
-> transform::Api<'t, X>,
X: event::Stream,
{
}<|fim▁end|> | |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
/* jshint node: true */
var logger = require('nlogger').logger(module);
var express = require('express');
var _ = require('underscore');
var config = require('./config');
var poller = require('./poller');
var app = module.exports = express.createServer();
var reports = {};
function reapOldReports() {
logger.info('starting reap cycle on reports: ', _.keys(reports).length);
_.each(_.keys(reports), function(key) {
var report = reports[key];
var age = Math.round(((new Date()).getTime() - report.reported)/1000);
if (age >= config.reapAge) {
logger.info('reaping stale report for: {}:{}', report.host, report.port);
delete reports[key];
}
});
}
poller.emitter.on('report', function(report) {
var key = report.host + ':' + report.port;
reports[key] = report;
});
app.configure(function() {
app.use(express.bodyParser());
app.use(express.methodOverride());
app.use(app.router);
app.use(express.static(__dirname + '/pages', { maxAge: 60*60*1000 }));
app.use(express.static(__dirname + '/static', { maxAge: 3*24*60*60*1000 }));
app.use(express.compress());
});
app.configure(function(){
app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
});
app.get('/reports', function(req, res) {
res.setHeader('Content-Type', 'application/json');<|fim▁hole|>
logger.info('starting web UI on port: ', config.httpPort);
app.listen(config.httpPort);
poller.startPollingMasterServer();
setInterval(reapOldReports, config.reapInterval*1000);<|fim▁end|> | res.setHeader('Cache-Control', 'public, max-age=' + 10);
res.send(JSON.stringify(reports));
res.end();
}); |
<|file_name|>channel.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
channel.py
:copyright: (c) 2015 by Fulfil.IO Inc.
:license: see LICENSE for more details.
"""
from trytond.pool import PoolMeta
from trytond.model import fields
__all__ = ['Channel']
__metaclass__ = PoolMeta
def submit_to_google(url, data):
import requests
import json
return requests.post(
url,
data=json.dumps(data),
headers={
'Content-Type': 'application/json',
'Authorization': 'Bearer ya29.5AE7v1wOfgun1gR_iXwuGhMnt8nPNbT4C-Pd39DUnsNGb9I6U5FQqRJXNyPb3a0Dk1OWzA', # noqa
}
)
class Channel:
__name__ = "sale.channel"
website = fields.Many2One('nereid.website', 'Website', select=True)
@classmethod<|fim▁hole|> pass<|fim▁end|> | def upload_products_to_google_merchant(cls): |
<|file_name|>block_header.rs<|end_file_name|><|fim▁begin|>use Encode;
use VarInt;
#[derive(Debug, Encode, PartialEq)]
/// 4 version int32_t Block version information (note, this is signed)
/// 32 prev_block char[32] The hash value of the previous block this particular block references
/// 32 merkle_root char[32] The reference to a Merkle tree collection which is a hash of all transactions related to this block
/// 4 timestamp uint32_t A timestamp recording when this block was created (Will overflow in 2106[2])
/// 4 bits uint32_t The calculated difficulty target being used for this block
/// 4 nonce uint32_t The nonce used to generate this block… to allow variations of the header and compute different hashes
/// 1 txn_count var_int Number of transaction entries, this value is always 0
pub struct BlockHeader {
pub version: i32,
pub prev_block: [u8; 32],
pub merkle_root: [u8; 32],
pub timestamp: u32,
pub bits: u32,
pub nonce: u32,
/// txn_count is a var_int on the wire
pub txn_count: VarInt,<|fim▁hole|><|fim▁end|> | } |
<|file_name|>PaginatedList.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Bill Mill <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# Copyright 2013 davidbrai <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #<|fim▁hole|># #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class PaginatedListBase:
def __init__(self):
self.__elements = list()
def __getitem__(self, index):
assert isinstance(index, (int, slice))
if isinstance(index, (int, long)):
self.__fetchToIndex(index)
return self.__elements[index]
else:
return self._Slice(self, index)
def __iter__(self):
for element in self.__elements:
yield element
while self._couldGrow():
newElements = self._grow()
for element in newElements:
yield element
def _isBiggerThan(self, index):
return len(self.__elements) > index or self._couldGrow()
def __fetchToIndex(self, index):
while len(self.__elements) <= index and self._couldGrow():
self._grow()
def _grow(self):
newElements = self._fetchNextPage()
self.__elements += newElements
return newElements
class _Slice:
def __init__(self, theList, theSlice):
self.__list = theList
self.__start = theSlice.start or 0
self.__stop = theSlice.stop
self.__step = theSlice.step or 1
def __iter__(self):
index = self.__start
while not self.__finished(index):
if self.__list._isBiggerThan(index):
yield self.__list[index]
index += self.__step
else:
return
def __finished(self, index):
return self.__stop is not None and index >= self.__stop
class PaginatedList(PaginatedListBase):
"""
This class abstracts the `pagination of the API <http://developer.github.com/v3/#pagination>`_.
You can simply enumerate through instances of this class::
for repo in user.get_repos():
print repo.name
You can also index them or take slices::
second_repo = user.get_repos()[1]
first_repos = user.get_repos()[:10]
If you want to iterate in reversed order, just do::
for repo in user.get_repos().reversed:
print repo.name
And if you really need it, you can explicitely access a specific page::
some_repos = user.get_repos().get_page(0)
some_other_repos = user.get_repos().get_page(3)
"""
def __init__(self, contentClass, requester, firstUrl, firstParams, headers=None):
PaginatedListBase.__init__(self)
self.__requester = requester
self.__contentClass = contentClass
self.__firstUrl = firstUrl
self.__firstParams = firstParams or ()
self.__nextUrl = firstUrl
self.__nextParams = firstParams or {}
self.__headers = headers
if self.__requester.per_page != 30:
self.__nextParams["per_page"] = self.__requester.per_page
self._reversed = False
self.__totalCount = None
@property
def totalCount(self):
if not self.__totalCount:
self._grow()
return self.__totalCount
def _getLastPageUrl(self):
headers, data = self.__requester.requestJsonAndCheck(
"GET",
self.__firstUrl,
parameters=self.__nextParams,
headers=self.__headers
)
links = self.__parseLinkHeader(headers)
lastUrl = links.get("last")
return lastUrl
@property
def reversed(self):
r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams)
r.__reverse()
return r
def __reverse(self):
self._reversed = True
lastUrl = self._getLastPageUrl()
if lastUrl:
self.__nextUrl = lastUrl
def _couldGrow(self):
return self.__nextUrl is not None
def _fetchNextPage(self):
headers, data = self.__requester.requestJsonAndCheck(
"GET",
self.__nextUrl,
parameters=self.__nextParams,
headers=self.__headers
)
data = data if data else []
self.__nextUrl = None
if len(data) > 0:
links = self.__parseLinkHeader(headers)
if self._reversed:
if "prev" in links:
self.__nextUrl = links["prev"]
elif "next" in links:
self.__nextUrl = links["next"]
self.__nextParams = None
if 'items' in data:
self.__totalCount = data['total_count']
data = data["items"]
content = [
self.__contentClass(self.__requester, headers, element, completed=False)
for element in data if element is not None
]
if self._reversed:
return content[::-1]
return content
def __parseLinkHeader(self, headers):
links = {}
if "link" in headers:
linkHeaders = headers["link"].split(", ")
for linkHeader in linkHeaders:
(url, rel) = linkHeader.split("; ")
url = url[1:-1]
rel = rel[5:-1]
links[rel] = url
return links
def get_page(self, page):
params = dict(self.__firstParams)
if page != 0:
params["page"] = page + 1
if self.__requester.per_page != 30:
params["per_page"] = self.__requester.per_page
headers, data = self.__requester.requestJsonAndCheck(
"GET",
self.__firstUrl,
parameters=params,
headers=self.__headers
)
if 'items' in data:
self.__totalCount = data['total_count']
data = data["items"]
return [
self.__contentClass(self.__requester, headers, element, completed=False)
for element in data
]<|fim▁end|> | # Software Foundation, either version 3 of the License, or (at your option) #
# any later version. # |
<|file_name|>keys.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client
import (
"context"
"encoding/json"
"errors"
"fmt"
"go.etcd.io/etcd/v3/pkg/pathutil"
"net/http"
"net/url"
"strconv"
"strings"
"time"
)
const (
ErrorCodeKeyNotFound = 100
ErrorCodeTestFailed = 101
ErrorCodeNotFile = 102
ErrorCodeNotDir = 104
ErrorCodeNodeExist = 105
ErrorCodeRootROnly = 107
ErrorCodeDirNotEmpty = 108
ErrorCodeUnauthorized = 110<|fim▁hole|> ErrorCodeInvalidField = 209
ErrorCodeInvalidForm = 210
ErrorCodeRaftInternal = 300
ErrorCodeLeaderElect = 301
ErrorCodeWatcherCleared = 400
ErrorCodeEventIndexCleared = 401
)
type Error struct {
Code int `json:"errorCode"`
Message string `json:"message"`
Cause string `json:"cause"`
Index uint64 `json:"index"`
}
func (e Error) Error() string {
return fmt.Sprintf("%v: %v (%v) [%v]", e.Code, e.Message, e.Cause, e.Index)
}
var (
ErrInvalidJSON = errors.New("client: response is invalid json. The endpoint is probably not valid etcd cluster endpoint")
ErrEmptyBody = errors.New("client: response body is empty")
)
// PrevExistType is used to define an existence condition when setting
// or deleting Nodes.
type PrevExistType string
const (
PrevIgnore = PrevExistType("")
PrevExist = PrevExistType("true")
PrevNoExist = PrevExistType("false")
)
var (
defaultV2KeysPrefix = "/v2/keys"
)
// NewKeysAPI builds a KeysAPI that interacts with etcd's key-value
// API over HTTP.
func NewKeysAPI(c Client) KeysAPI {
return NewKeysAPIWithPrefix(c, defaultV2KeysPrefix)
}
// NewKeysAPIWithPrefix acts like NewKeysAPI, but allows the caller
// to provide a custom base URL path. This should only be used in
// very rare cases.
func NewKeysAPIWithPrefix(c Client, p string) KeysAPI {
return &httpKeysAPI{
client: c,
prefix: p,
}
}
type KeysAPI interface {
// Get retrieves a set of Nodes from etcd
Get(ctx context.Context, key string, opts *GetOptions) (*Response, error)
// Set assigns a new value to a Node identified by a given key. The caller
// may define a set of conditions in the SetOptions. If SetOptions.Dir=true
// then value is ignored.
Set(ctx context.Context, key, value string, opts *SetOptions) (*Response, error)
// Delete removes a Node identified by the given key, optionally destroying
// all of its children as well. The caller may define a set of required
// conditions in an DeleteOptions object.
Delete(ctx context.Context, key string, opts *DeleteOptions) (*Response, error)
// Create is an alias for Set w/ PrevExist=false
Create(ctx context.Context, key, value string) (*Response, error)
// CreateInOrder is used to atomically create in-order keys within the given directory.
CreateInOrder(ctx context.Context, dir, value string, opts *CreateInOrderOptions) (*Response, error)
// Update is an alias for Set w/ PrevExist=true
Update(ctx context.Context, key, value string) (*Response, error)
// Watcher builds a new Watcher targeted at a specific Node identified
// by the given key. The Watcher may be configured at creation time
// through a WatcherOptions object. The returned Watcher is designed
// to emit events that happen to a Node, and optionally to its children.
Watcher(key string, opts *WatcherOptions) Watcher
}
type WatcherOptions struct {
// AfterIndex defines the index after-which the Watcher should
// start emitting events. For example, if a value of 5 is
// provided, the first event will have an index >= 6.
//
// Setting AfterIndex to 0 (default) means that the Watcher
// should start watching for events starting at the current
// index, whatever that may be.
AfterIndex uint64
// Recursive specifies whether or not the Watcher should emit
// events that occur in children of the given keyspace. If set
// to false (default), events will be limited to those that
// occur for the exact key.
Recursive bool
}
type CreateInOrderOptions struct {
// TTL defines a period of time after-which the Node should
// expire and no longer exist. Values <= 0 are ignored. Given
// that the zero-value is ignored, TTL cannot be used to set
// a TTL of 0.
TTL time.Duration
}
type SetOptions struct {
// PrevValue specifies what the current value of the Node must
// be in order for the Set operation to succeed.
//
// Leaving this field empty means that the caller wishes to
// ignore the current value of the Node. This cannot be used
// to compare the Node's current value to an empty string.
//
// PrevValue is ignored if Dir=true
PrevValue string
// PrevIndex indicates what the current ModifiedIndex of the
// Node must be in order for the Set operation to succeed.
//
// If PrevIndex is set to 0 (default), no comparison is made.
PrevIndex uint64
// PrevExist specifies whether the Node must currently exist
// (PrevExist) or not (PrevNoExist). If the caller does not
// care about existence, set PrevExist to PrevIgnore, or simply
// leave it unset.
PrevExist PrevExistType
// TTL defines a period of time after-which the Node should
// expire and no longer exist. Values <= 0 are ignored. Given
// that the zero-value is ignored, TTL cannot be used to set
// a TTL of 0.
TTL time.Duration
// Refresh set to true means a TTL value can be updated
// without firing a watch or changing the node value. A
// value must not be provided when refreshing a key.
Refresh bool
// Dir specifies whether or not this Node should be created as a directory.
Dir bool
// NoValueOnSuccess specifies whether the response contains the current value of the Node.
// If set, the response will only contain the current value when the request fails.
NoValueOnSuccess bool
}
type GetOptions struct {
// Recursive defines whether or not all children of the Node
// should be returned.
Recursive bool
// Sort instructs the server whether or not to sort the Nodes.
// If true, the Nodes are sorted alphabetically by key in
// ascending order (A to z). If false (default), the Nodes will
// not be sorted and the ordering used should not be considered
// predictable.
Sort bool
// Quorum specifies whether it gets the latest committed value that
// has been applied in quorum of members, which ensures external
// consistency (or linearizability).
Quorum bool
}
type DeleteOptions struct {
// PrevValue specifies what the current value of the Node must
// be in order for the Delete operation to succeed.
//
// Leaving this field empty means that the caller wishes to
// ignore the current value of the Node. This cannot be used
// to compare the Node's current value to an empty string.
PrevValue string
// PrevIndex indicates what the current ModifiedIndex of the
// Node must be in order for the Delete operation to succeed.
//
// If PrevIndex is set to 0 (default), no comparison is made.
PrevIndex uint64
// Recursive defines whether or not all children of the Node
// should be deleted. If set to true, all children of the Node
// identified by the given key will be deleted. If left unset
// or explicitly set to false, only a single Node will be
// deleted.
Recursive bool
// Dir specifies whether or not this Node should be removed as a directory.
Dir bool
}
type Watcher interface {
// Next blocks until an etcd event occurs, then returns a Response
// representing that event. The behavior of Next depends on the
// WatcherOptions used to construct the Watcher. Next is designed to
// be called repeatedly, each time blocking until a subsequent event
// is available.
//
// If the provided context is cancelled, Next will return a non-nil
// error. Any other failures encountered while waiting for the next
// event (connection issues, deserialization failures, etc) will
// also result in a non-nil error.
Next(context.Context) (*Response, error)
}
type Response struct {
// Action is the name of the operation that occurred. Possible values
// include get, set, delete, update, create, compareAndSwap,
// compareAndDelete and expire.
Action string `json:"action"`
// Node represents the state of the relevant etcd Node.
Node *Node `json:"node"`
// PrevNode represents the previous state of the Node. PrevNode is non-nil
// only if the Node existed before the action occurred and the action
// caused a change to the Node.
PrevNode *Node `json:"prevNode"`
// Index holds the cluster-level index at the time the Response was generated.
// This index is not tied to the Node(s) contained in this Response.
Index uint64 `json:"-"`
// ClusterID holds the cluster-level ID reported by the server. This
// should be different for different etcd clusters.
ClusterID string `json:"-"`
}
type Node struct {
// Key represents the unique location of this Node (e.g. "/foo/bar").
Key string `json:"key"`
// Dir reports whether node describes a directory.
Dir bool `json:"dir,omitempty"`
// Value is the current data stored on this Node. If this Node
// is a directory, Value will be empty.
Value string `json:"value"`
// Nodes holds the children of this Node, only if this Node is a directory.
// This slice of will be arbitrarily deep (children, grandchildren, great-
// grandchildren, etc.) if a recursive Get or Watch request were made.
Nodes Nodes `json:"nodes"`
// CreatedIndex is the etcd index at-which this Node was created.
CreatedIndex uint64 `json:"createdIndex"`
// ModifiedIndex is the etcd index at-which this Node was last modified.
ModifiedIndex uint64 `json:"modifiedIndex"`
// Expiration is the server side expiration time of the key.
Expiration *time.Time `json:"expiration,omitempty"`
// TTL is the time to live of the key in second.
TTL int64 `json:"ttl,omitempty"`
}
func (n *Node) String() string {
return fmt.Sprintf("{Key: %s, CreatedIndex: %d, ModifiedIndex: %d, TTL: %d}", n.Key, n.CreatedIndex, n.ModifiedIndex, n.TTL)
}
// TTLDuration returns the Node's TTL as a time.Duration object
func (n *Node) TTLDuration() time.Duration {
return time.Duration(n.TTL) * time.Second
}
type Nodes []*Node
// interfaces for sorting
func (ns Nodes) Len() int { return len(ns) }
func (ns Nodes) Less(i, j int) bool { return ns[i].Key < ns[j].Key }
func (ns Nodes) Swap(i, j int) { ns[i], ns[j] = ns[j], ns[i] }
type httpKeysAPI struct {
client httpClient
prefix string
}
func (k *httpKeysAPI) Set(ctx context.Context, key, val string, opts *SetOptions) (*Response, error) {
act := &setAction{
Prefix: k.prefix,
Key: key,
Value: val,
}
if opts != nil {
act.PrevValue = opts.PrevValue
act.PrevIndex = opts.PrevIndex
act.PrevExist = opts.PrevExist
act.TTL = opts.TTL
act.Refresh = opts.Refresh
act.Dir = opts.Dir
act.NoValueOnSuccess = opts.NoValueOnSuccess
}
doCtx := ctx
if act.PrevExist == PrevNoExist {
doCtx = context.WithValue(doCtx, &oneShotCtxValue, &oneShotCtxValue)
}
resp, body, err := k.client.Do(doCtx, act)
if err != nil {
return nil, err
}
return unmarshalHTTPResponse(resp.StatusCode, resp.Header, body)
}
func (k *httpKeysAPI) Create(ctx context.Context, key, val string) (*Response, error) {
return k.Set(ctx, key, val, &SetOptions{PrevExist: PrevNoExist})
}
func (k *httpKeysAPI) CreateInOrder(ctx context.Context, dir, val string, opts *CreateInOrderOptions) (*Response, error) {
act := &createInOrderAction{
Prefix: k.prefix,
Dir: dir,
Value: val,
}
if opts != nil {
act.TTL = opts.TTL
}
resp, body, err := k.client.Do(ctx, act)
if err != nil {
return nil, err
}
return unmarshalHTTPResponse(resp.StatusCode, resp.Header, body)
}
func (k *httpKeysAPI) Update(ctx context.Context, key, val string) (*Response, error) {
return k.Set(ctx, key, val, &SetOptions{PrevExist: PrevExist})
}
func (k *httpKeysAPI) Delete(ctx context.Context, key string, opts *DeleteOptions) (*Response, error) {
act := &deleteAction{
Prefix: k.prefix,
Key: key,
}
if opts != nil {
act.PrevValue = opts.PrevValue
act.PrevIndex = opts.PrevIndex
act.Dir = opts.Dir
act.Recursive = opts.Recursive
}
doCtx := context.WithValue(ctx, &oneShotCtxValue, &oneShotCtxValue)
resp, body, err := k.client.Do(doCtx, act)
if err != nil {
return nil, err
}
return unmarshalHTTPResponse(resp.StatusCode, resp.Header, body)
}
func (k *httpKeysAPI) Get(ctx context.Context, key string, opts *GetOptions) (*Response, error) {
act := &getAction{
Prefix: k.prefix,
Key: key,
}
if opts != nil {
act.Recursive = opts.Recursive
act.Sorted = opts.Sort
act.Quorum = opts.Quorum
}
resp, body, err := k.client.Do(ctx, act)
if err != nil {
return nil, err
}
return unmarshalHTTPResponse(resp.StatusCode, resp.Header, body)
}
func (k *httpKeysAPI) Watcher(key string, opts *WatcherOptions) Watcher {
act := waitAction{
Prefix: k.prefix,
Key: key,
}
if opts != nil {
act.Recursive = opts.Recursive
if opts.AfterIndex > 0 {
act.WaitIndex = opts.AfterIndex + 1
}
}
return &httpWatcher{
client: k.client,
nextWait: act,
}
}
type httpWatcher struct {
client httpClient
nextWait waitAction
}
func (hw *httpWatcher) Next(ctx context.Context) (*Response, error) {
for {
httpresp, body, err := hw.client.Do(ctx, &hw.nextWait)
if err != nil {
return nil, err
}
resp, err := unmarshalHTTPResponse(httpresp.StatusCode, httpresp.Header, body)
if err != nil {
if err == ErrEmptyBody {
continue
}
return nil, err
}
hw.nextWait.WaitIndex = resp.Node.ModifiedIndex + 1
return resp, nil
}
}
// v2KeysURL forms a URL representing the location of a key.
// The endpoint argument represents the base URL of an etcd
// server. The prefix is the path needed to route from the
// provided endpoint's path to the root of the keys API
// (typically "/v2/keys").
func v2KeysURL(ep url.URL, prefix, key string) *url.URL {
// We concatenate all parts together manually. We cannot use
// path.Join because it does not reserve trailing slash.
// We call CanonicalURLPath to further cleanup the path.
if prefix != "" && prefix[0] != '/' {
prefix = "/" + prefix
}
if key != "" && key[0] != '/' {
key = "/" + key
}
ep.Path = pathutil.CanonicalURLPath(ep.Path + prefix + key)
return &ep
}
type getAction struct {
Prefix string
Key string
Recursive bool
Sorted bool
Quorum bool
}
func (g *getAction) HTTPRequest(ep url.URL) *http.Request {
u := v2KeysURL(ep, g.Prefix, g.Key)
params := u.Query()
params.Set("recursive", strconv.FormatBool(g.Recursive))
params.Set("sorted", strconv.FormatBool(g.Sorted))
params.Set("quorum", strconv.FormatBool(g.Quorum))
u.RawQuery = params.Encode()
req, _ := http.NewRequest("GET", u.String(), nil)
return req
}
type waitAction struct {
Prefix string
Key string
WaitIndex uint64
Recursive bool
}
func (w *waitAction) HTTPRequest(ep url.URL) *http.Request {
u := v2KeysURL(ep, w.Prefix, w.Key)
params := u.Query()
params.Set("wait", "true")
params.Set("waitIndex", strconv.FormatUint(w.WaitIndex, 10))
params.Set("recursive", strconv.FormatBool(w.Recursive))
u.RawQuery = params.Encode()
req, _ := http.NewRequest("GET", u.String(), nil)
return req
}
type setAction struct {
Prefix string
Key string
Value string
PrevValue string
PrevIndex uint64
PrevExist PrevExistType
TTL time.Duration
Refresh bool
Dir bool
NoValueOnSuccess bool
}
func (a *setAction) HTTPRequest(ep url.URL) *http.Request {
u := v2KeysURL(ep, a.Prefix, a.Key)
params := u.Query()
form := url.Values{}
// we're either creating a directory or setting a key
if a.Dir {
params.Set("dir", strconv.FormatBool(a.Dir))
} else {
// These options are only valid for setting a key
if a.PrevValue != "" {
params.Set("prevValue", a.PrevValue)
}
form.Add("value", a.Value)
}
// Options which apply to both setting a key and creating a dir
if a.PrevIndex != 0 {
params.Set("prevIndex", strconv.FormatUint(a.PrevIndex, 10))
}
if a.PrevExist != PrevIgnore {
params.Set("prevExist", string(a.PrevExist))
}
if a.TTL > 0 {
form.Add("ttl", strconv.FormatUint(uint64(a.TTL.Seconds()), 10))
}
if a.Refresh {
form.Add("refresh", "true")
}
if a.NoValueOnSuccess {
params.Set("noValueOnSuccess", strconv.FormatBool(a.NoValueOnSuccess))
}
u.RawQuery = params.Encode()
body := strings.NewReader(form.Encode())
req, _ := http.NewRequest("PUT", u.String(), body)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
return req
}
type deleteAction struct {
Prefix string
Key string
PrevValue string
PrevIndex uint64
Dir bool
Recursive bool
}
func (a *deleteAction) HTTPRequest(ep url.URL) *http.Request {
u := v2KeysURL(ep, a.Prefix, a.Key)
params := u.Query()
if a.PrevValue != "" {
params.Set("prevValue", a.PrevValue)
}
if a.PrevIndex != 0 {
params.Set("prevIndex", strconv.FormatUint(a.PrevIndex, 10))
}
if a.Dir {
params.Set("dir", "true")
}
if a.Recursive {
params.Set("recursive", "true")
}
u.RawQuery = params.Encode()
req, _ := http.NewRequest("DELETE", u.String(), nil)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
return req
}
type createInOrderAction struct {
Prefix string
Dir string
Value string
TTL time.Duration
}
func (a *createInOrderAction) HTTPRequest(ep url.URL) *http.Request {
u := v2KeysURL(ep, a.Prefix, a.Dir)
form := url.Values{}
form.Add("value", a.Value)
if a.TTL > 0 {
form.Add("ttl", strconv.FormatUint(uint64(a.TTL.Seconds()), 10))
}
body := strings.NewReader(form.Encode())
req, _ := http.NewRequest("POST", u.String(), body)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
return req
}
func unmarshalHTTPResponse(code int, header http.Header, body []byte) (res *Response, err error) {
switch code {
case http.StatusOK, http.StatusCreated:
if len(body) == 0 {
return nil, ErrEmptyBody
}
res, err = unmarshalSuccessfulKeysResponse(header, body)
default:
err = unmarshalFailedKeysResponse(body)
}
return res, err
}
var jsonIterator = caseSensitiveJsonIterator()
func unmarshalSuccessfulKeysResponse(header http.Header, body []byte) (*Response, error) {
var res Response
err := jsonIterator.Unmarshal(body, &res)
if err != nil {
return nil, ErrInvalidJSON
}
if header.Get("X-Etcd-Index") != "" {
res.Index, err = strconv.ParseUint(header.Get("X-Etcd-Index"), 10, 64)
if err != nil {
return nil, err
}
}
res.ClusterID = header.Get("X-Etcd-Cluster-ID")
return &res, nil
}
func unmarshalFailedKeysResponse(body []byte) error {
var etcdErr Error
if err := json.Unmarshal(body, &etcdErr); err != nil {
return ErrInvalidJSON
}
return etcdErr
}<|fim▁end|> |
ErrorCodePrevValueRequired = 201
ErrorCodeTTLNaN = 202
ErrorCodeIndexNaN = 203 |
<|file_name|>hostdev_test.py<|end_file_name|><|fim▁begin|>#
# Copyright 2014-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import six
from vdsm.common import exception
from vdsm.common import xmlutils
from vdsm.virt.vmdevices import network, hwclass
from testlib import VdsmTestCase as TestCaseBase, XMLTestCase
from testlib import permutations, expandPermutations
from monkeypatch import MonkeyClass, MonkeyPatchScope
from testValidation import skipif
from vdsm.common import hooks
from vdsm.common import hostdev
from vdsm.common import libvirtconnection
import hostdevlib
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda json: json)
@MonkeyClass(hostdev, '_get_udev_block_mapping',
             lambda: hostdevlib.UDEV_BLOCK_MAP)
class HostdevTests(TestCaseBase):
    # Tests for vdsm.common.hostdev device discovery and parameter
    # processing. The MonkeyClass decorators above replace all host /
    # libvirt interaction with the fakes and fixtures from hostdevlib.
    def testProcessDeviceParams(self):
        # A generic device XML is parsed into the expected fixture dict.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.ADDITIONAL_DEVICE).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.ADDITIONAL_DEVICE_PROCESSED,
            deviceXML
        )
    @skipif(six.PY3, "Not relevant in Python 3 libvirt")
    # libvirt in Python 3 returns strings, so we don't deal with
    # invalid coding anymore.
    def testProcessDeviceParamsInvalidEncoding(self):
        # Device XML containing invalid encoding is still processed
        # (Python 2 only; see the skipif above).
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.COMPUTER_DEVICE).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.COMPUTER_DEVICE_PROCESSED,
            deviceXML
        )
    def testProcessSRIOV_PFDeviceParams(self):
        # SR-IOV physical function XML is parsed into the PF fixture.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.SRIOV_PF).XMLDesc()
        )
        self.assertEqual(
            hostdevlib.SRIOV_PF_PROCESSED,
            deviceXML
        )
    def testProcessSRIOV_VFDeviceParams(self):
        # SR-IOV virtual function XML is parsed into the VF fixture.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.SRIOV_VF).XMLDesc()
        )
        self.assertEqual(hostdevlib.SRIOV_VF_PROCESSED, deviceXML)
    def testProcessNetDeviceParams(self):
        # Network device XML is parsed into the net-device fixture.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.NET_DEVICE).XMLDesc()
        )
        self.assertEqual(hostdevlib.NET_DEVICE_PROCESSED, deviceXML)
    def testProcessMdevDeviceParams(self):
        # Mediated device (mdev) XML is parsed into the mdev fixture.
        deviceXML = hostdev._process_device_params(
            libvirtconnection.get().nodeDeviceLookupByName(
                hostdevlib.MDEV_DEVICE).XMLDesc()
        )
        self.assertEqual(hostdevlib.MDEV_DEVICE_PROCESSED, deviceXML)
    def testGetDevicesFromLibvirt(self):
        # The full scan must return exactly the fake PCI + USB + SCSI set.
        libvirt_devices, _ = hostdev._get_devices_from_libvirt()
        self.assertEqual(hostdevlib.DEVICES_PROCESSED, libvirt_devices)
        self.assertEqual(len(libvirt_devices),
                         len(hostdevlib.PCI_DEVICES) +
                         len(hostdevlib.USB_DEVICES) +
                         len(hostdevlib.SCSI_DEVICES))
    @permutations([[''], [('pci',)], [('usb_device',)],
                   [('pci', 'usb_device')]])
    def testListByCaps(self, caps):
        # For every requested capability, the listing must contain at
        # least the fixture devices known for that capability.
        devices = hostdev.list_by_caps(caps)
        for cap in caps:
            self.assertTrue(set(hostdevlib.DEVICES_BY_CAPS[cap].keys()).
                            issubset(set(devices.keys())))
    @permutations([
        # addr_type, addr, name
        ('usb', {'bus': '1', 'device': '2'}, 'usb_1_1'),
        ('usb', {'bus': '1', 'device': '10'}, 'usb_1_1_4'),
        ('pci', {'slot': '26', 'bus': '0', 'domain': '0', 'function': '0'},
         'pci_0000_00_1a_0'),
        ('scsi', {'bus': '0', 'host': '1', 'lun': '0', 'target': '0'},
         'scsi_1_0_0_0'),
    ])
    def test_device_name_from_address(self, addr_type, addr, name):
        # we need to make sure we scan all the devices (hence caps=None)
        hostdev.list_by_caps()
        self.assertEqual(
            hostdev.device_name_from_address(addr_type, addr),
            name
        )
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection.get)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
@MonkeyClass(hooks, 'after_hostdev_list_by_caps', lambda json: json)
class HostdevPerformanceTests(TestCaseBase):
    # Scale test: list_by_caps must handle a large fake device tree and
    # report every device the (fake) libvirt connection knows about.
    def test_3k_storage_devices(self):
        # use_hostdev_tree() swaps in the large fixture tree for the
        # duration of the with-block.
        with hostdevlib.Connection.use_hostdev_tree():
            self.assertEqual(
                len(hostdev.list_by_caps()),
                len(libvirtconnection.get().listAllDevices())
            )
@expandPermutations
@MonkeyClass(libvirtconnection, 'get', hostdevlib.Connection)
@MonkeyClass(hostdev, '_sriov_totalvfs', hostdevlib.fake_totalvfs)
@MonkeyClass(hostdev, '_pci_header_type', lambda _: 0)
class HostdevCreationTests(XMLTestCase):
_PCI_ADDRESS = {'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',
'function': '0x0', 'type': 'pci'}
_PCI_ADDRESS_XML = '<address bus="0x01" domain="0x0000" function="0x0" \
slot="0x02" type="pci"/>'
    def setUp(self):
        # Minimal VM configuration shared by the creation tests.
        # NOTE(review): self.conf is not referenced by the methods visible
        # here — confirm whether the fixture machinery consumes it.
        self.conf = {
            'vmName': 'testVm',
            'vmId': '9ffe28b6-6134-4b1e-8804-1185f49c436f',
            'smp': '8', 'maxVCpus': '160',
            'memSize': '1024', 'memGuaranteedSize': '512'}
# TODO: next 2 tests should reside in their own module (interfaceTests.py)
    def testCreateSRIOVVF(self):
        # An SR-IOV VF NIC without an explicit PCI address must render the
        # expected hostdev interface XML (empty address placeholder).
        dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
                    'hostdev': hostdevlib.SRIOV_VF,
                    'macAddr': 'ff:ff:ff:ff:ff:ff',
                    'specParams': {'vlanid': 3},
                    'bootOrder': '9'}
        device = network.Interface(self.log, **dev_spec)
        self.assertXMLEqual(
            xmlutils.tostring(device.getXML()),
            hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % ('',))
def testCreateSRIOVVFWithAddress(self):
dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
'hostdev': hostdevlib.SRIOV_VF,
'macAddr': 'ff:ff:ff:ff:ff:ff',
'specParams': {'vlanid': 3},
'bootOrder': '9', 'address':
{'slot': '0x02', 'bus': '0x01', 'domain': '0x0000',<|fim▁hole|> hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % (
self._PCI_ADDRESS_XML
)
)
@expandPermutations
@MonkeyClass(hostdev, '_each_supported_mdev_type', hostdevlib.fake_mdev_types)
@MonkeyClass(hostdev, '_mdev_type_details', hostdevlib.fake_mdev_details)
@MonkeyClass(hostdev, '_mdev_device_vendor', hostdevlib.fake_mdev_vendor)
@MonkeyClass(hostdev, '_mdev_type_devices', hostdevlib.fake_mdev_instances)
@MonkeyClass(hostdev, 'supervdsm', hostdevlib.FakeSuperVdsm())
class TestMdev(TestCaseBase):
    # Tests for mediated-device (vGPU) placement in vdsm.common.hostdev,
    # run against two identical fake cards built in setUp().
    def setUp(self):
        # Each fake card exposes the same four mdev types; the second
        # FakeMdevType argument is presumably the per-card instance
        # capacity — confirm against hostdevlib.
        def make_device(name):
            mdev_types = [
                hostdevlib.FakeMdevType('incompatible-1', 2),
                hostdevlib.FakeMdevType('8q', 1),
                hostdevlib.FakeMdevType('4q', 2),
                hostdevlib.FakeMdevType('incompatible-2', 2),
            ]
            return hostdevlib.FakeMdevDevice(name=name, vendor='0x10de',
                                             mdev_types=mdev_types)
        self.devices = [make_device(name) for name in ('card-1', 'card-2',)]
    @permutations([
        # (mdev_type, mdev_uuid)*, mdev_placement, instances
        [[('4q', '4q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1'], []]],
        [[('8q', '8q-1')],
         hostdev.MdevPlacement.SEPARATE, [['8q-1'], []]],
        [[('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], []]],
        [[('4q', '4q-1'), ('8q', '8q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1'], ['8q-1']]],
        [[('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.SEPARATE, [['4q-1'], ['4q-2']]],
        [[('4q', '4q-1'), ('8q', '8q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], ['8q-1']]],
        [[('8q', '8q-1'), ('4q', '4q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.COMPACT, [['8q-1'], ['4q-1', '4q-2']]],
        [[('4q', '4q-1'), ('4q', '4q-2'), ('8q', '8q-1')],
         hostdev.MdevPlacement.COMPACT, [['4q-1', '4q-2'], ['8q-1']]],
        [[('4q', '4q-1'), ('8q', '8q-1'), ('4q', '4q-2')],
         hostdev.MdevPlacement.SEPARATE, [['4q-1', '4q-2'], ['8q-1']]],
    ])
    def test_vgpu_placement(self, mdev_specs, mdev_placement, instances):
        # Spawn the requested mdevs, then verify which card each instance
        # landed on: `instances` lists the expected uuids per card.
        with MonkeyPatchScope([
            (hostdev, '_each_mdev_device', lambda: self.devices)
        ]):
            for mdev_type, mdev_uuid in mdev_specs:
                hostdev.spawn_mdev(mdev_type, mdev_uuid, mdev_placement,
                                   self.log)
        for inst, dev in zip(instances, self.devices):
            dev_inst = []
            for mdev_type in dev.mdev_types:
                dev_inst.extend(mdev_type.instances)
            self.assertEqual(inst, dev_inst)
    @permutations([
        [hostdev.MdevPlacement.COMPACT],
        [hostdev.MdevPlacement.SEPARATE],
    ])
    def test_unsupported_vgpu_placement(self, placement):
        # Requesting an mdev type no card supports must raise
        # ResourceUnavailable regardless of the placement policy.
        with MonkeyPatchScope([
            (hostdev, '_each_mdev_device', lambda: self.devices)
        ]):
            self.assertRaises(
                exception.ResourceUnavailable,
                hostdev.spawn_mdev, 'unsupported', '1234', placement, self.log
            )
device = network.Interface(self.log, **dev_spec)
self.assertXMLEqual(
xmlutils.tostring(device.getXML()), |
<|file_name|>fa.js<|end_file_name|><|fim▁begin|>OC.L10N.register(
"templateeditor",
{
"Could not load template" : "امکان بارگذاری قالب وجود ندارد",
"Saved" : "ذخیره شد",
"Reset" : "تنظیم مجدد",
"An error occurred" : "یک خطا رخ داده است",
"Sharing email - public link shares (HTML)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(HTML)",
"Sharing email - public link shares (plain text fallback)" : "ایمیل اشتراک گذاری-لینک عمومی اشتراک گذاری(plain text fallback)",
"Sharing email (HTML)" : "اشتراکگذاری ایمیل (HTML)",
"Sharing email (plain text fallback)" : "ایمیل اشتراک گذاری (plain text fallback)",
"Lost password mail" : "ایمیل فراموش کردن رمز عبور",
"New user email (HTML)" : "ایمیل کاربری جدید (HTML)",
"New user email (plain text fallback)" : "ایمیل کاربر جدید (plain text fallback)",<|fim▁hole|> "Please choose a template" : "لطفا یک قالب انتخاب کنید",
"Save" : "ذخیره"
},
"nplurals=1; plural=0;");<|fim▁end|> | "Activity notification mail" : "ایمیل هشدار فعالیت",
"Mail Templates" : "قالبهای ایمیل",
"Theme" : "تم",
"Template" : "قالب", |
<|file_name|>MyArgparse.py<|end_file_name|><|fim▁begin|># coding=utf-8
from __future__ import unicode_literals
"""
Name: MyArgparse
Author: Andy Liu
Email : [email protected]
Created: 3/26/2015
Copyright: All rights reserved.<|fim▁hole|> as published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import logging
def parse_command_line():
parser = argparse.ArgumentParser(prog='PROG', description='%(prog)s can ...')
parser.add_argument('NoPre', action="store", help='help information')
parser.add_argument('-t', action="store_true", dest='boolean_switch', default=False, help='Set a switch to true')
parser.add_argument('-f', action="store_false", dest='boolean_switch', default=True, help='Set a switch to false')
parser.add_argument('-s', action="store", dest='simple_value', help="Store a simple value")
parser.add_argument('-st', action="store", dest="simple_value", type=int,
help='Store a simple value and define type')
parser.add_argument('-c', action='store_const', dest='constant_value', const='value-to-store',
help='Store a constant value')
parser.add_argument('-a', action='append', dest='collection', default=[], help='Add repeated values to a list')
parser.add_argument('-A', action='append_const', dest='const_collection', const='value-1-to-append', default=[],
help='Add different values to list')
parser.add_argument('-B', action='append_const', dest='const_collection', const='value-2-to-append',
help='Add different values to list')
args = parser.parse_args()
logging.debug('NoPre = %r' % args.NoPre)
logging.debug('simple_value = %r' % args.simple_value)
logging.debug('constant_value = %r' % args.constant_value)
logging.debug('boolean_switch = %r' % args.boolean_switch)
logging.debug('collection = %r' % args.collection)
logging.debug('const_collection = %r' % args.const_collection)
return args
if __name__ == '__main__':
from MyLog import init_logger
logger = init_logger()
parse_command_line()<|fim▁end|> | Licence: This program is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License |
<|file_name|>gateway_types.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// +genclient
// +kubebuilder:object:root=true
// +kubebuilder:resource:shortName=gtw
// +kubebuilder:subresource:status
// +kubebuilder:printcolumn:name="Class",type=string,JSONPath=`.spec.gatewayClassName`
// Gateway represents an instantiation of a service-traffic handling
// infrastructure by binding Listeners to a set of IP addresses.
//
// Implementations should add the `gateway-exists-finalizer.networking.x-k8s.io`
// finalizer on the associated GatewayClass whenever Gateway(s) is running.
// This ensures that a GatewayClass associated with a Gateway(s) is not
// deleted while in use.
type Gateway struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
// Spec defines the desired state of Gateway.
Spec GatewaySpec `json:"spec,omitempty"`
// Status defines the current state of Gateway.
//
// +kubebuilder:default={conditions: {{type: "Scheduled", status: "False", reason:"NotReconciled", message:"Waiting for controller", lastTransitionTime: "1970-01-01T00:00:00Z"}}}
Status GatewayStatus `json:"status,omitempty"`
}
// +kubebuilder:object:root=true
// GatewayList contains a list of Gateway.
type GatewayList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []Gateway `json:"items"`
}
// GatewaySpec defines the desired state of Gateway.
//
// Not all possible combinations of options specified in the Spec are
// valid. Some invalid configurations can be caught synchronously via a
// webhook, but there are many cases that will require asynchronous
// signaling via the GatewayStatus block.
type GatewaySpec struct {
// GatewayClassName used for this Gateway. This is the name of a
// GatewayClass resource.
//
// +kubebuilder:validation:MinLength=1
// +kubebuilder:validation:MaxLength=253
GatewayClassName string `json:"gatewayClassName"`
// Listeners associated with this Gateway. Listeners define
// logical endpoints that are bound on this Gateway's addresses.
// At least one Listener MUST be specified.
//
// An implementation MAY group Listeners by Port and then collapse each
// group of Listeners into a single Listener if the implementation
// determines that the Listeners in the group are "compatible". An
// implementation MAY also group together and collapse compatible
// Listeners belonging to different Gateways.
//
// For example, an implementation might consider Listeners to be
// compatible with each other if all of the following conditions are
// met:
//
// 1. Either each Listener within the group specifies the "HTTP"
// Protocol or each Listener within the group specifies either
// the "HTTPS" or "TLS" Protocol.
//
// 2. Each Listener within the group specifies a Hostname that is unique
// within the group.
//
// 3. As a special case, one Listener within a group may omit Hostname,
// in which case this Listener matches when no other Listener
// matches.
//
// If the implementation does collapse compatible Listeners, the
// hostname provided in the incoming client request MUST be
// matched to a Listener to find the correct set of Routes.
// The incoming hostname MUST be matched using the Hostname
// field for each Listener in order of most to least specific.
// That is, exact matches must be processed before wildcard
// matches.
//
// If this field specifies multiple Listeners that have the same
// Port value but are not compatible, the implementation must raise
// a "Conflicted" condition in the Listener status.
//
// Support: Core
//
// +kubebuilder:validation:MinItems=1
// +kubebuilder:validation:MaxItems=64
Listeners []Listener `json:"listeners"`
// Addresses requested for this gateway. This is optional and
// behavior can depend on the GatewayClass. If a value is set
// in the spec and the requested address is invalid, the
// GatewayClass MUST indicate this in the associated entry in
// GatewayStatus.Addresses.
//
// If no Addresses are specified, the GatewayClass may
// schedule the Gateway in an implementation-defined manner,
// assigning an appropriate set of Addresses.
//
// The GatewayClass MUST bind all Listeners to every
// GatewayAddress that it assigns to the Gateway.
//
// Support: Core
//
// +optional
// +kubebuilder:validation:MaxItems=16
Addresses []GatewayAddress `json:"addresses,omitempty"`
}
// Listener embodies the concept of a logical endpoint where a Gateway can
// accept network connections. Each listener in a Gateway must have a unique
// combination of Hostname, Port, and Protocol. This will be enforced by a
// validating webhook.
type Listener struct {
// Hostname specifies the virtual hostname to match for protocol types that
// define this concept. When unspecified, "", or `*`, all hostnames are
// matched. This field can be omitted for protocols that don't require
// hostname based matching.
//
// Hostname is the fully qualified domain name of a network host, as defined
// by RFC 3986. Note the following deviations from the "host" part of the
// URI as defined in the RFC:
//
// 1. IP literals are not allowed.
// 2. The `:` delimiter is not respected because ports are not allowed.
//
// Hostname can be "precise" which is a domain name without the terminating
// dot of a network host (e.g. "foo.example.com") or "wildcard", which is a
// domain name prefixed with a single wildcard label (e.g. `*.example.com`).
// The wildcard character `*` must appear by itself as the first DNS label
// and matches only a single label.
//
// Support: Core
//
// +optional
Hostname *Hostname `json:"hostname,omitempty"`
// Port is the network port. Multiple listeners may use the
// same port, subject to the Listener compatibility rules.
//
// Support: Core
Port PortNumber `json:"port"`
// Protocol specifies the network protocol this listener expects to receive.
// The GatewayClass MUST apply the Hostname match appropriately for each
// protocol:
//
// * For the "TLS" protocol, the Hostname match MUST be
// applied to the [SNI](https://tools.ietf.org/html/rfc6066#section-3)
// server name offered by the client.
// * For the "HTTP" protocol, the Hostname match MUST be
// applied to the host portion of the
// [effective request URI](https://tools.ietf.org/html/rfc7230#section-5.5)
// or the [:authority pseudo-header](https://tools.ietf.org/html/rfc7540#section-8.1.2.3)
// * For the "HTTPS" protocol, the Hostname match MUST be
// applied at both the TLS and HTTP protocol layers.
//
// Support: Core
Protocol ProtocolType `json:"protocol"`
// TLS is the TLS configuration for the Listener. This field
// is required if the Protocol field is "HTTPS" or "TLS" and
// ignored otherwise.
//
// The association of SNIs to Certificate defined in GatewayTLSConfig is
// defined based on the Hostname field for this listener.
//
// The GatewayClass MUST use the longest matching SNI out of all
// available certificates for any TLS handshake.
//
// Support: Core
//
// +optional
TLS *GatewayTLSConfig `json:"tls,omitempty"`
// Routes specifies a schema for associating routes with the
// Listener using selectors. A Route is a resource capable of
// servicing a request and allows a cluster operator to expose
// a cluster resource (i.e. Service) by externally-reachable
// URL, load-balance traffic and terminate SSL/TLS. Typically,
// a route is a "HTTPRoute" or "TCPRoute" in group
// "networking.x-k8s.io", however, an implementation may support
// other types of resources.
//
// The Routes selector MUST select a set of objects that
// are compatible with the application protocol specified in
// the Protocol field.
//
// Although a client request may technically match multiple route rules,
// only one rule may ultimately receive the request. Matching precedence
// MUST be determined in order of the following criteria:
//
// * The most specific match. For example, the most specific HTTPRoute match
// is determined by the longest matching combination of hostname and path.
// * The oldest Route based on creation timestamp. For example, a Route with
// a creation timestamp of "2020-09-08 01:02:03" is given precedence over
// a Route with a creation timestamp of "2020-09-08 01:02:04".
// * If everything else is equivalent, the Route appearing first in
// alphabetical order (namespace/name) should be given precedence. For
// example, foo/bar is given precedence over foo/baz.
//
// All valid portions of a Route selected by this field should be supported.
// Invalid portions of a Route can be ignored (sometimes that will mean the
// full Route). If a portion of a Route transitions from valid to invalid,
// support for that portion of the Route should be dropped to ensure
// consistency. For example, even if a filter specified by a Route is
// invalid, the rest of the Route should still be supported.
//
// Support: Core
Routes RouteBindingSelector `json:"routes"`
}
// ProtocolType defines the application protocol accepted by a Listener.
// Implementations are not required to accept all the defined protocols.
// If an implementation does not support a specified protocol, it
// should raise a "Detached" condition for the affected Listener with
// a reason of "UnsupportedProtocol".
//
// Core ProtocolType values are listed in the table below.
//
// Implementations can define their own protocols if a core ProtocolType does not
// exist. Such definitions must use prefixed name, such as
// `mycompany.com/my-custom-protocol`. Un-prefixed names are reserved for core
// protocols. Any protocol defined by implementations will fall under custom
// conformance.
type ProtocolType string
const (
	// HTTPProtocolType accepts cleartext HTTP/1.1 sessions over TCP.
	HTTPProtocolType ProtocolType = "HTTP"
	// HTTPSProtocolType accepts HTTP/1.1 or HTTP/2 sessions over TLS.
	HTTPSProtocolType ProtocolType = "HTTPS"
	// TLSProtocolType accepts TLS sessions over TCP.
	TLSProtocolType ProtocolType = "TLS"
	// TCPProtocolType accepts TCP sessions.
	TCPProtocolType ProtocolType = "TCP"
	// UDPProtocolType accepts UDP packets.
	UDPProtocolType ProtocolType = "UDP"
)
// TLSRouteOverrideType type defines the level of allowance for Routes
// to override a specific TLS setting.
// +kubebuilder:validation:Enum=Allow;Deny
// +kubebuilder:default=Deny
type TLSRouteOverrideType string
const (
	// TLSROuteOVerrideAllow allows the parameter to be configured from all
	// routes. NOTE(review): the identifier's casing ("ROuteOVerride") looks
	// like a typo, but it is exported API and cannot be renamed without
	// breaking callers.
	TLSROuteOVerrideAllow TLSRouteOverrideType = "Allow"
	// TLSRouteOverrideDeny prohibits the parameter from being configured
	// from any route.
	TLSRouteOverrideDeny TLSRouteOverrideType = "Deny"
)
// TLSOverridePolicy defines a schema for overriding TLS settings at the Route
// level.
type TLSOverridePolicy struct {
// Certificate dictates if TLS certificates can be configured
// via Routes. If set to 'Allow', a TLS certificate for a hostname
// defined in a Route takes precedence over the certificate defined in
// Gateway.
//
// Support: Core
//
// +optional
// +kubebuilder:default=Deny
Certificate TLSRouteOverrideType `json:"certificate,omitempty"`
}
// GatewayTLSConfig describes a TLS configuration.
//
// References:
//
// - nginx: https://nginx.org/en/docs/http/configuring_https_servers.html
// - envoy: https://www.envoyproxy.io/docs/envoy/latest/api-v2/api/v2/auth/cert.proto
// - haproxy: https://www.haproxy.com/documentation/aloha/9-5/traffic-management/lb-layer7/tls/
// - gcp: https://cloud.google.com/load-balancing/docs/use-ssl-policies#creating_an_ssl_policy_with_a_custom_profile
// - aws: https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#describe-ssl-policies
// - azure: https://docs.microsoft.com/en-us/azure/app-service/configure-ssl-bindings#enforce-tls-1112
type GatewayTLSConfig struct {
// Mode defines the TLS behavior for the TLS session initiated by the client.
// There are two possible modes:
// - Terminate: The TLS session between the downstream client
// and the Gateway is terminated at the Gateway. This mode requires
// certificateRef to be set.
// - Passthrough: The TLS session is NOT terminated by the Gateway. This
// implies that the Gateway can't decipher the TLS stream except for
// the ClientHello message of the TLS protocol.
// CertificateRef field is ignored in this mode.
//
// Support: Core
//
// +optional
// +kubebuilder:default=Terminate
Mode TLSModeType `json:"mode,omitempty"`
// CertificateRef is the reference to Kubernetes object that contain a
// TLS certificate and private key. This certificate MUST be used for
// TLS handshakes for the domain this GatewayTLSConfig is associated with.
//
// This field is required when mode is set to "Terminate" (default) and
// optional otherwise.
//
// If an entry in this list omits or specifies the empty string for both
// the group and the resource, the resource defaults to "secrets". An
// implementation may support other resources (for example, resource
// "mycertificates" in group "networking.acme.io").
//
// Support: Core (Kubernetes Secrets)
//
// Support: Implementation-specific (Other resource types)
//
// +optional
CertificateRef *LocalObjectReference `json:"certificateRef,omitempty"`
// RouteOverride dictates if TLS settings can be configured
// via Routes or not.
//
// CertificateRef must be defined even if `routeOverride.certificate` is
// set to 'Allow' as it will be used as the default certificate for the
// listener.
//
// Support: Core
//
// +optional
// +kubebuilder:default={certificate:Deny}
RouteOverride TLSOverridePolicy `json:"routeOverride,omitempty"`
// Options are a list of key/value pairs to give extended options
// to the provider.
//
// There variation among providers as to how ciphersuites are
// expressed. If there is a common subset for expressing ciphers
// then it will make sense to loft that as a core API
// construct.
//
// Support: Implementation-specific
//
// +optional
Options map[string]string `json:"options,omitempty"`
}
// TLSModeType type defines how a Gateway handles TLS sessions.
//
// +kubebuilder:validation:Enum=Terminate;Passthrough
type TLSModeType string
const (
	// TLSModeTerminate indicates that the TLS session between the
	// downstream client and the Gateway is terminated at the Gateway.
	TLSModeTerminate TLSModeType = "Terminate"
	// TLSModePassthrough indicates that the TLS session is NOT terminated
	// by the Gateway. This implies that the Gateway can't decipher the TLS
	// stream except for the ClientHello message of the TLS protocol.
	TLSModePassthrough TLSModeType = "Passthrough"
)
// If Namespaces and Selector are defined, only routes matching both selectors are
// associated with the Gateway.
type RouteBindingSelector struct {
// Namespaces indicates in which namespaces Routes should be selected
// for this Gateway. This is restricted to the namespace of this Gateway by
// default.
//
// Support: Core
//
// +optional
// +kubebuilder:default={from: Same}
Namespaces RouteNamespaces `json:"namespaces,omitempty"`
// Selector specifies a set of route labels used for selecting
// routes to associate with the Gateway. If this Selector is defined,
// only routes matching the Selector are associated with the Gateway.
// An empty Selector matches all routes.
//
// Support: Core
//
// +optional
Selector metav1.LabelSelector `json:"selector,omitempty"`
// Group is the group of the route resource to select. Omitting the value or specifying
// the empty string indicates the networking.x-k8s.io API group.
// For example, use the following to select an HTTPRoute:
//
// routes:
// kind: HTTPRoute
//
// Otherwise, if an alternative API group is desired, specify the desired
// group:
//
// routes:
// group: acme.io
// kind: FooRoute
//
// Support: Core
//
// +optional
// +kubebuilder:default=networking.x-k8s.io
// +kubebuilder:validation:MinLength=1
// +kubebuilder:validation:MaxLength=253
Group string `json:"group,omitempty"`
// Kind is the kind of the route resource to select.
//
// Kind MUST correspond to kinds of routes that are compatible with the
// application protocol specified in the Listener's Protocol field.
//
// If an implementation does not support or recognize this
// resource type, it SHOULD set the "ResolvedRefs" condition to false for
// this listener with the "InvalidRoutesRef" reason.
//
// Support: Core
Kind string `json:"kind"`
}
// RouteSelectType specifies where Routes should be selected by a Gateway.
//
// +kubebuilder:validation:Enum=All;Selector;Same
type RouteSelectType string
const (
	// RouteSelectAll: Routes in all namespaces may be used by this Gateway.
	RouteSelectAll RouteSelectType = "All"
	// RouteSelectSelector: only Routes in namespaces selected by the
	// selector may be used by this Gateway.
	RouteSelectSelector RouteSelectType = "Selector"
	// RouteSelectSame: only Routes in the same namespace as the Gateway may
	// be used by this Gateway.
	RouteSelectSame RouteSelectType = "Same"
)
// RouteNamespaces indicate which namespaces Routes should be selected from.
type RouteNamespaces struct {
// From indicates where Routes will be selected for this Gateway. Possible
// values are:
// * All: Routes in all namespaces may be used by this Gateway.
// * Selector: Routes in namespaces selected by the selector may be used by
// this Gateway.
// * Same: Only Routes in the same namespace may be used by this Gateway.
//
// Support: Core
//
// +optional
// +kubebuilder:default=Same
From RouteSelectType `json:"from,omitempty"`
// Selector must be specified when From is set to "Selector". In that case,
// only Routes in Namespaces matching this Selector will be selected by this
// Gateway. This field is ignored for other values of "From".
//
// Support: Core
//
// +optional
Selector metav1.LabelSelector `json:"selector,omitempty"`
}
// GatewayAddress describes an address that can be bound to a Gateway.
type GatewayAddress struct {
// Type of the address.
//
// Support: Extended
//
// +optional
// +kubebuilder:default=IPAddress
Type AddressType `json:"type,omitempty"`
// Value of the address. The validity of the values will depend
// on the type and support by the controller.
//
// Examples: `1.2.3.4`, `128::1`, `my-ip-address`.
//
// +kubebuilder:validation:MinLength=1
// +kubebuilder:validation:MaxLength=253
Value string `json:"value"`
}
// AddressType defines how a network address is represented as a text string.
//
// If the requested address is unsupported, the controller
// should raise the "Detached" listener status condition on
// the Gateway with the "UnsupportedAddress" reason.
//
// +kubebuilder:validation:Enum=IPAddress;NamedAddress
type AddressType string
const (
// A textual representation of a numeric IP address. IPv4
// addresses must be in dotted-decimal form. IPv6 addresses
// must be in a standard IPv6 text representation
// (see [RFC 5952](https://tools.ietf.org/html/rfc5952)).
//
// Support: Extended
IPAddressType AddressType = "IPAddress"
// An opaque identifier that represents a specific IP address. The
// interpretation of the name is dependent on the controller. For
// example, a "NamedAddress" might be a cloud-dependent identifier
// for a static or elastic IP.
//
// Support: Implementation-specific
NamedAddressType AddressType = "NamedAddress"
)
// GatewayStatus defines the observed state of Gateway.
type GatewayStatus struct {
// Addresses lists the IP addresses that have actually been
// bound to the Gateway. These addresses may differ from the
// addresses in the Spec, e.g. if the Gateway automatically
// assigns an address from a reserved pool.
//
// These addresses should all be of type "IPAddress".
//
// +optional
// +kubebuilder:validation:MaxItems=16
Addresses []GatewayAddress `json:"addresses,omitempty"`
// Conditions describe the current conditions of the Gateway.
//
// Implementations should prefer to express Gateway conditions
// using the `GatewayConditionType` and `GatewayConditionReason`
// constants so that operators and tools can converge on a common
// vocabulary to describe Gateway state.
//
// Known condition types are:
//
// * "Scheduled"
// * "Ready"
//
// +optional
// +listType=map
// +listMapKey=type
// +kubebuilder:validation:MaxItems=8
// +kubebuilder:default={{type: "Scheduled", status: "False", reason:"NotReconciled", message:"Waiting for controller", lastTransitionTime: "1970-01-01T00:00:00Z"}}
Conditions []metav1.Condition `json:"conditions,omitempty"`
// Listeners provide status for each unique listener port defined in the Spec.
//
// +optional
// +listType=map
// +listMapKey=port
// +kubebuilder:validation:MaxItems=64
Listeners []ListenerStatus `json:"listeners,omitempty"`
}
// GatewayConditionType is a type of condition associated with a
// Gateway. This type should be used with the GatewayStatus.Conditions
// field.
type GatewayConditionType string
// GatewayConditionReason defines the set of reasons that explain
// why a particular Gateway condition type has been raised.
type GatewayConditionReason string
const (
	// This condition is true when the controller managing the
	// Gateway has scheduled the Gateway to the underlying network
	// infrastructure.
	//
	// Possible reasons for this condition to be false are:
	//
	// * "NotReconciled"
	// * "NoSuchGatewayClass"
	// * "NoResources"
	//
	// Controllers may raise this condition with other reasons,
	// but should prefer to use the reasons listed above to improve
	// interoperability.
	GatewayConditionScheduled GatewayConditionType = "Scheduled"
	// This reason is used with the "Scheduled" condition when the Gateway
	// has been recently created and no controller has reconciled it yet.
	GatewayReasonNotReconciled GatewayConditionReason = "NotReconciled"
	// This reason is used with the "Scheduled" condition when the Gateway
	// is not scheduled because there is no controller that recognizes
	// the GatewayClassName. This reason should only be set by
	// a controller that has cluster-wide visibility of all the
	// installed GatewayClasses.
	GatewayReasonNoSuchGatewayClass GatewayConditionReason = "NoSuchGatewayClass"
	// This reason is used with the "Scheduled" condition when the
	// Gateway is not scheduled because insufficient infrastructure
	// resources are available.
	GatewayReasonNoResources GatewayConditionReason = "NoResources"
)
const (
// This condition is true when the Gateway is expected to be able
// to serve traffic. Note that this does not indicate that the
// Gateway configuration is current or even complete (e.g. the
// controller may still not have reconciled the latest version,
// or some parts of the configuration could be missing).
//
// If both the "ListenersNotValid" and "ListenersNotReady"
// reasons are true, the Gateway controller should prefer the
// "ListenersNotValid" reason.
//
// Possible reasons for this condition to be false are:
//
// * "ListenersNotValid"
// * "ListenersNotReady"
// * "AddressNotAssigned"
//
// Controllers may raise this condition with other reasons,
// but should prefer to use the reasons listed above to improve
// interoperability.
GatewayConditionReady GatewayConditionType = "Ready"
// This reason is used with the "Ready" condition when one or
// more Listeners have an invalid or unsupported configuration
// and cannot be configured on the Gateway.
GatewayReasonListenersNotValid GatewayConditionReason = "ListenersNotValid"
// This reason is used with the "Ready" condition when one or
// more Listeners are not ready to serve traffic.
GatewayReasonListenersNotReady GatewayConditionReason = "ListenersNotReady"
// This reason is used with the "Ready" condition when the requested
// address has not been assigned to the Gateway. This reason
// can be used to express a range of circumstances, including
// (but not limited to) IPAM address exhaustion, invalid
// or unsupported address requests, or a named address not
// being found.
GatewayReasonAddressNotAssigned GatewayConditionReason = "AddressNotAssigned"
)
// ListenerStatus is the status associated with a Listener.
type ListenerStatus struct {
// Port is the unique Listener port value for which this message is
// reporting the status.
Port PortNumber `json:"port"`
// Protocol is the Listener protocol value for which this message is
// reporting the status.
Protocol ProtocolType `json:"protocol"`
// Hostname is the Listener hostname value for which this message is
// reporting the status.
//
// +optional
Hostname *Hostname `json:"hostname,omitempty"`
// Conditions describe the current condition of this listener.
//
// +listType=map
// +listMapKey=type
// +kubebuilder:validation:MaxItems=8
Conditions []metav1.Condition `json:"conditions"`
}
// ListenerConditionType is a type of condition associated with the
// listener. This type should be used with the ListenerStatus.Conditions
// field.
type ListenerConditionType string
// ListenerConditionReason defines the set of reasons that explain
// why a particular Listener condition type has been raised.
type ListenerConditionReason string
const (
// This condition indicates that the controller was unable to resolve
// conflicting specification requirements for this Listener. If a
// Listener is conflicted, its network port should not be configured
// on any network elements.
//
// Possible reasons for this condition to be true are:
//
// * "HostnameConflict"
// * "ProtocolConflict"
// * "RouteConflict"
//
// Controllers may raise this condition with other reasons,
// but should prefer to use the reasons listed above to improve
// interoperability.
ListenerConditionConflicted ListenerConditionType = "Conflicted"
// This reason is used with the "Conflicted" condition when
// the Listener conflicts with hostnames in other Listeners. For
// example, this reason would be used when multiple Listeners on
// the same port use `*` in the hostname field.
ListenerReasonHostnameConflict ListenerConditionReason = "HostnameConflict"
// This reason is used with the "Conflicted" condition when
// multiple Listeners are specified with the same Listener port
// number, but have conflicting protocol specifications.
ListenerReasonProtocolConflict ListenerConditionReason = "ProtocolConflict"
// This reason is used with the "Conflicted" condition when the route
// resources selected for this Listener conflict with other
// specified properties of the Listener (e.g. Protocol).
// For example, a Listener that specifies "UDP" as the protocol
// but a route selector that resolves "TCPRoute" objects.
ListenerReasonRouteConflict ListenerConditionReason = "RouteConflict"
)
const (
	// This condition indicates that, even though the listener is
	// syntactically and semantically valid, the controller is not able
	// to configure it on the underlying Gateway infrastructure.
	//
	// A Listener is specified as a logical requirement, but needs to be
	// configured on a network endpoint (i.e. address and port) by a
	// controller. The controller may be unable to attach the Listener
	// if it specifies an unsupported requirement, or prerequisite
	// resources are not available.
	//
	// Possible reasons for this condition to be true are:
	//
	// * "PortUnavailable"
	// * "UnsupportedExtension"
	// * "UnsupportedProtocol"
	// * "UnsupportedAddress"
	//
	// Controllers may raise this condition with other reasons,
	// but should prefer to use the reasons listed above to improve
	// interoperability.
	ListenerConditionDetached ListenerConditionType = "Detached"
	// This reason is used with the "Detached" condition when the
	// Listener requests a port that cannot be used on the Gateway.
	ListenerReasonPortUnavailable ListenerConditionReason = "PortUnavailable"
	// This reason is used with the "Detached" condition when the
	// controller detects that an implementation-specific Listener
	// extension is being requested, but is not able to support
	// the extension.
	ListenerReasonUnsupportedExtension ListenerConditionReason = "UnsupportedExtension"
	// This reason is used with the "Detached" condition when the
	// Listener could not be attached to the Gateway because its
	// protocol type is not supported.
	ListenerReasonUnsupportedProtocol ListenerConditionReason = "UnsupportedProtocol"
	// This reason is used with the "Detached" condition when
	// the Listener could not be attached to the Gateway because the
	// requested address is not supported.
	ListenerReasonUnsupportedAddress ListenerConditionReason = "UnsupportedAddress"
)
const (
// This condition indicates whether the controller was able to
// resolve all the object references for the Listener.
//
// Possible reasons for this condition to be false are:
//
// * "DegradedRoutes"
// * "InvalidCertificateRef"
// * "InvalidRoutesRef"
//
// Controllers may raise this condition with other reasons,
// but should prefer to use the reasons listed above to improve
// interoperability.
ListenerConditionResolvedRefs ListenerConditionType = "ResolvedRefs"
// This reason is used with the "ResolvedRefs" condition
// when not all of the routes selected by this Listener could be
// configured. The specific reason for the degraded route should
// be indicated in the route's .Status.Conditions field.
ListenerReasonDegradedRoutes ListenerConditionReason = "DegradedRoutes"
// This reason is used with the "ResolvedRefs" condition when the
// Listener has a TLS configuration with a TLS CertificateRef
// that is invalid or cannot be resolved.
ListenerReasonInvalidCertificateRef ListenerConditionReason = "InvalidCertificateRef"
// This reason is used with the "ResolvedRefs" condition when
// the Listener's Routes selector or kind is invalid or cannot
// be resolved. Note that it is not an error for this selector to
// not resolve any Routes, and the "ResolvedRefs" status condition
// should not be raised in that case.
ListenerReasonInvalidRoutesRef ListenerConditionReason = "InvalidRoutesRef"
)
const (
	// This condition indicates whether the Listener has been
	// configured on the Gateway.
	//
	// Possible reasons for this condition to be false are:
	//
	// * "Invalid"
	// * "Pending"
	//
	// Controllers may raise this condition with other reasons,
	// but should prefer to use the reasons listed above to improve
	// interoperability.
	ListenerConditionReady ListenerConditionType = "Ready"
	// This reason is used with the "Ready" condition when the
	// Listener is syntactically or semantically invalid.
	ListenerReasonInvalid ListenerConditionReason = "Invalid"
	// This reason is used with the "Ready" condition when the
	// Listener is not yet online and ready to accept client
	// traffic.
	ListenerReasonPending ListenerConditionReason = "Pending"
)
<|file_name|>chat.js<|end_file_name|><|fim▁begin|>/*
* author: the5fire
* blog: the5fire.com
* date: 2014-03-16
* */
$(function(){
WEB_SOCKET_SWF_LOCATION = "/static/WebSocketMain.swf";
WEB_SOCKET_DEBUG = true;
var socket = io.connect();
socket.on('connect', function(){
console.log('connected');
});
$(window).bind("beforeunload", function() {
socket.disconnect();
});
var User = Backbone.Model.extend({
urlRoot: '/user',
});
var Topic = Backbone.Model.extend({
urlRoot: '/topic',
});
var Message = Backbone.Model.extend({
urlRoot: '/message',
sync: function(method, model, options) {
if (method === 'create') {
socket.emit('message', model.attributes);
// 错误处理没做
$('#comment').val('');
} else {
return Backbone.sync(method, model, options);
};
},
});
var Topics = Backbone.Collection.extend({
url: '/topic',
model: Topic,
});
var Messages = Backbone.Collection.extend({
url: '/message',
model: Message,
});
var topics = new Topics;
var TopicView = Backbone.View.extend({
tagName: "div class='column'",
templ: _.template($('#topic-template').html()),
// 渲染列表页模板
render: function() {
$(this.el).html(this.templ(this.model.toJSON()));
return this;
},
});
var messages = new Messages;
var MessageView = Backbone.View.extend({
tagName: "div class='comment'",
templ: _.template($('#message-template').html()),
// 渲染列表页模板
render: function() {
$(this.el).html(this.templ(this.model.toJSON()));
return this;
},
});
var AppView = Backbone.View.extend({
el: "#main",
topic_list: $("#topic_list"),
topic_section: $("#topic_section"),
message_section: $("#message_section"),
message_list: $("#message_list"),
message_head: $("#message_head"),
events: {
'click .submit': 'saveMessage',
'click .submit_topic': 'saveTopic',
'keypress #comment': 'saveMessageEvent',
},
initialize: function() {
_.bindAll(this, 'addTopic', 'addMessage');
topics.bind('add', this.addTopic);
// 定义消息列表池,每个topic有自己的message collection
// 这样保证每个主题下得消息不冲突
this.message_pool = {};
this.socket = null;
this.message_list_div = document.getElementById('message_list');
},
addTopic: function(topic) {
var view = new TopicView({model: topic});
this.topic_list.append(view.render().el);
},
        // Append one rendered message to the message list and keep the
        // list scrolled to the bottom.
        // NOTE(review): `self` is an implicit global assigned elsewhere
        // (e.g. `self = this;` in saveTopic/showMessageHead) — it is not
        // guaranteed to reference this view; confirm before relying on it.
        addMessage: function(message) {
            var view = new MessageView({model: message});
            this.message_list.append(view.render().el);
            self.message_list.scrollTop(self.message_list_div.scrollHeight);
        },
saveMessageEvent: function(evt) {
if (evt.keyCode == 13) {
this.saveMessage(evt);
}
},
saveMessage: function(evt) {
var comment_box = $('#comment')
var content = comment_box.val();
if (content == '') {
alert('内容不能为空');
return false;
}
var topic_id = comment_box.attr('topic_id');
var message = new Message({<|fim▁hole|> var messages = this.message_pool[topic_id];
message.save(); // 依赖上面对sync的重载
},
saveTopic: function(evt) {
var topic_title = $('#topic_title');
if (topic_title.val() == '') {
alert('主题不能为空!');
return false
}
var topic = new Topic({
title: topic_title.val(),
});
self = this;
topic.save(null, {
success: function(model, response, options){
topics.add(response);
topic_title.val('');
},
error: function(model, resp, options) {
alert(resp.responseText);
}
});
},
showTopic: function(){
topics.fetch();
this.topic_section.show();
this.message_section.hide();
this.message_list.html('');
this.goOut()
},
        // Leave the current chat room: notify the server and drop the
        // per-room 'message' listener so stale handlers do not pile up.
        goOut: function(){
            // leave the room
            socket.emit('go_out');
            socket.removeAllListeners('message');
        },
initMessage: function(topic_id) {
var messages = new Messages;
messages.bind('add', this.addMessage);
this.message_pool[topic_id] = messages;
},
        // Switch the UI to a single topic's message view: join the room
        // on the server, subscribe to incoming messages for it, and load
        // the existing message history.
        showMessage: function(topic_id) {
            this.initMessage(topic_id);
            this.message_section.show();
            this.topic_section.hide();
            this.showMessageHead(topic_id);
            $('#comment').attr('topic_id', topic_id);
            var messages = this.message_pool[topic_id];
            // enter the room
            socket.emit('topic', topic_id);
            // listen for 'message' events and add them to this topic's collection
            socket.on('message', function(response) {
                messages.add(response);
            });
            messages.fetch({
                data: {topic_id: topic_id},
                success: function(resp) {
                    // scroll to the newest message after history loads
                    self.message_list.scrollTop(self.message_list_div.scrollHeight)
                },
                error: function(model, resp, options) {
                    alert(resp.responseText);
                }
            });
        },
showMessageHead: function(topic_id) {
var topic = new Topic({id: topic_id});
self = this;
topic.fetch({
success: function(resp, model, options){
self.message_head.html(model.title);
},
error: function(model, resp, options) {
alert(resp.responseText);
}
});
},
});
var LoginView = Backbone.View.extend({
el: "#login",
wrapper: $('#wrapper'),
events: {
'keypress #login_pwd': 'loginEvent',
'click .login_submit': 'login',
'keypress #reg_pwd_repeat': 'registeEvent',
'click .registe_submit': 'registe',
},
hide: function() {
this.wrapper.hide();
},
show: function() {
this.wrapper.show();
},
loginEvent: function(evt) {
if (evt.keyCode == 13) {
this.login(evt);
}
},
login: function(evt){
var username_input = $('#login_username');
var pwd_input = $('#login_pwd');
var u = new User({
username: username_input.val(),
password: pwd_input.val(),
});
u.save(null, {
url: '/login',
success: function(model, resp, options){
g_user = resp;
// 跳转到index
appRouter.navigate('index', {trigger: true});
},
error: function(model, resp, options) {
alert(resp.responseText);
}
});
},
registeEvent: function(evt) {
if (evt.keyCode == 13) {
this.registe(evt);
}
},
registe: function(evt){
var reg_username_input = $('#reg_username');
var reg_pwd_input = $('#reg_pwd');
var reg_pwd_repeat_input = $('#reg_pwd_repeat');
var u = new User({
username: reg_username_input.val(),
password: reg_pwd_input.val(),
password_repeat: reg_pwd_repeat_input.val(),
});
u.save(null, {
success: function(model, resp, options){
g_user = resp;
// 跳转到index
appRouter.navigate('index', {trigger: true});
},
error: function(model, resp, options) {
alert(resp.responseText);
}
});
},
});
var UserView = Backbone.View.extend({
el: "#user_info",
username: $('#username'),
show: function(username) {
this.username.html(username);
this.$el.show();
},
});
var AppRouter = Backbone.Router.extend({
routes: {
"login": "login",
"index": "index",
"topic/:id" : "topic",
},
initialize: function(){
// 初始化项目, 显示首页
this.appView = new AppView();
this.loginView = new LoginView();
this.userView = new UserView();
this.indexFlag = false;
},
login: function(){
this.loginView.show();
},
index: function(){
if (g_user && g_user.id != undefined) {
this.appView.showTopic();
this.userView.show(g_user.username);
this.loginView.hide();
this.indexFlag = true; // 标志已经到达主页了
}
},
topic: function(topic_id) {
if (g_user && g_user.id != undefined) {
this.appView.showMessage(topic_id);
this.userView.show(g_user.username);
this.loginView.hide();
this.indexFlag = true; // 标志已经到达主页了
}
},
});
var appRouter = new AppRouter();
var g_user = new User;
g_user.fetch({
success: function(model, resp, options){
g_user = resp;
Backbone.history.start({pustState: true});
if(g_user === null || g_user.id === undefined) {
// 跳转到登录页面
appRouter.navigate('login', {trigger: true});
} else if (appRouter.indexFlag == false){
// 跳转到首页
appRouter.navigate('index', {trigger: true});
}
},
error: function(model, resp, options) {
alert(resp.responseText);
}
}); // 获取当前用户
});<|fim▁end|> | content: content,
topic_id: topic_id,
}); |
<|file_name|>Vdemo.java<|end_file_name|><|fim▁begin|>package com.baomidou.hibernateplus.spring.vo;
import java.io.Serializable;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import com.baomidou.hibernateplus.entity.Convert;
/**
* <p>
* Vdemo
* </p>
*
* @author Caratacus
* @date 2016-12-2
*/
public class Vdemo extends Convert implements Serializable {
protected Long id;
private String demo1;
private String demo2;
private String demo3;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getDemo1() {
return demo1;
}
public void setDemo1(String demo1) {
this.demo1 = demo1;
}
public String getDemo2() {<|fim▁hole|> public void setDemo2(String demo2) {
this.demo2 = demo2;
}
public String getDemo3() {
return demo3;
}
public void setDemo3(String demo3) {
this.demo3 = demo3;
}
}<|fim▁end|> | return demo2;
}
|
<|file_name|>tsan_mutex.cc<|end_file_name|><|fim▁begin|>//===-- tsan_mutex.cc -----------------------------------------------------===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_libc.h"
#include "tsan_mutex.h"
#include "tsan_platform.h"
#include "tsan_rtl.h"
namespace __tsan {
// Simple reader-writer spin-mutex. Optimized for not-so-contended case.
// Readers have preference, can possibly starve writers.
// The table fixes what mutexes can be locked under what mutexes.
// E.g. if the row for MutexTypeThreads contains MutexTypeReport,
// then Report mutex can be locked while under Threads mutex.
// The leaf mutexes can be locked under any other mutexes.
// Recursive locking is not supported.
#if TSAN_DEBUG && !TSAN_GO
const MutexType MutexTypeLeaf = (MutexType)-1;
static MutexType CanLockTab[MutexTypeCount][MutexTypeCount] = {
/*0 MutexTypeInvalid*/ {},
/*1 MutexTypeTrace*/ {MutexTypeLeaf},
/*2 MutexTypeThreads*/ {MutexTypeReport},
/*3 MutexTypeReport*/ {MutexTypeSyncTab, MutexTypeSyncVar,
MutexTypeMBlock, MutexTypeJavaMBlock},
/*4 MutexTypeSyncVar*/ {},
/*5 MutexTypeSyncTab*/ {MutexTypeSyncVar},
/*6 MutexTypeSlab*/ {MutexTypeLeaf},
/*7 MutexTypeAnnotations*/ {},
/*8 MutexTypeAtExit*/ {MutexTypeSyncTab},
/*9 MutexTypeMBlock*/ {MutexTypeSyncVar},
/*10 MutexTypeJavaMBlock*/ {MutexTypeSyncVar},
};
static bool CanLockAdj[MutexTypeCount][MutexTypeCount];
#endif
void InitializeMutex() {
#if TSAN_DEBUG && !TSAN_GO
// Build the "can lock" adjacency matrix.
// If [i][j]==true, then one can lock mutex j while under mutex i.
const int N = MutexTypeCount;
int cnt[N] = {};
bool leaf[N] = {};
for (int i = 1; i < N; i++) {
for (int j = 0; j < N; j++) {
MutexType z = CanLockTab[i][j];
if (z == MutexTypeInvalid)
continue;
if (z == MutexTypeLeaf) {
CHECK(!leaf[i]);
leaf[i] = true;
continue;
}
CHECK(!CanLockAdj[i][(int)z]);
CanLockAdj[i][(int)z] = true;
cnt[i]++;
}
}
for (int i = 0; i < N; i++) {
CHECK(!leaf[i] || cnt[i] == 0);
}
// Add leaf mutexes.
for (int i = 0; i < N; i++) {
if (!leaf[i])
continue;
for (int j = 0; j < N; j++) {
if (i == j || leaf[j] || j == MutexTypeInvalid)
continue;
CHECK(!CanLockAdj[j][i]);
CanLockAdj[j][i] = true;
}
}
// Build the transitive closure.
bool CanLockAdj2[MutexTypeCount][MutexTypeCount];
for (int i = 0; i < N; i++) {
for (int j = 0; j < N; j++) {
CanLockAdj2[i][j] = CanLockAdj[i][j];
}
}
for (int k = 0; k < N; k++) {
for (int i = 0; i < N; i++) {
for (int j = 0; j < N; j++) {
if (CanLockAdj2[i][k] && CanLockAdj2[k][j]) {
CanLockAdj2[i][j] = true;
}
}
}
}
#if 0
Printf("Can lock graph:\n");
for (int i = 0; i < N; i++) {
for (int j = 0; j < N; j++) {
Printf("%d ", CanLockAdj[i][j]);
}
Printf("\n");
}
Printf("Can lock graph closure:\n");
for (int i = 0; i < N; i++) {
for (int j = 0; j < N; j++) {
Printf("%d ", CanLockAdj2[i][j]);
}
Printf("\n");
}
#endif
// Verify that the graph is acyclic.
for (int i = 0; i < N; i++) {
if (CanLockAdj2[i][i]) {
Printf("Mutex %d participates in a cycle\n", i);
Die();
}
}
#endif
}
DeadlockDetector::DeadlockDetector() {
  // Rely on zero initialization because some mutexes can be locked before ctor.
  // (locked_[i] == 0 means "type i not held"; seq_ starting at 0 is valid.)
}
#if TSAN_DEBUG && !TSAN_GO
// Record acquisition of mutex type `t` by the current thread and verify
// the lock order against the precomputed CanLockAdj matrix, aborting on
// an internal lock-order violation.
void DeadlockDetector::Lock(MutexType t) {
  // Printf("LOCK %d @%zu\n", t, seq_ + 1);
  CHECK_GT(t, MutexTypeInvalid);
  CHECK_LT(t, MutexTypeCount);
  u64 max_seq = 0;
  u64 max_idx = MutexTypeInvalid;
  // Find the most recently acquired mutex type that is still held
  // (locked_[i] stores its acquisition sequence number, 0 == not held).
  for (int i = 0; i != MutexTypeCount; i++) {
    if (locked_[i] == 0)
      continue;
    CHECK_NE(locked_[i], max_seq);
    if (max_seq < locked_[i]) {
      max_seq = locked_[i];
      max_idx = i;
    }
  }
  locked_[t] = ++seq_;
  if (max_idx == MutexTypeInvalid)
    return;  // no other mutex held: any acquisition is legal
  // Printf("  last %d @%zu\n", max_idx, max_seq);
  if (!CanLockAdj[max_idx][t]) {
    Printf("ThreadSanitizer: internal deadlock detected\n");
    Printf("ThreadSanitizer: can't lock %d while under %zu\n",
           t, (uptr)max_idx);
    CHECK(0);
  }
}
// Record release of mutex type `t`; it must currently be held.
void DeadlockDetector::Unlock(MutexType t) {
  // Printf("UNLO %d @%zu #%zu\n", t, seq_, locked_[t]);
  CHECK(locked_[t]);
  locked_[t] = 0;
}
#endif
const uptr kUnlocked = 0;
const uptr kWriteLock = 1;
const uptr kReadLock = 2;
// Spin-then-yield backoff used while waiting for the mutex: the first
// kActiveSpinIters waits actively spin (proc_yield), every later wait
// yields the CPU to the scheduler instead.
class Backoff {
 public:
  Backoff()
      : iter_() {
  }

  // Perform one wait step; always returns true so it can drive a loop
  // condition (for (Backoff b; b.Do();) ...).
  bool Do() {
    int i = iter_++;
    if (i < kActiveSpinIters)
      proc_yield(kActiveSpinCnt);
    else
      internal_sched_yield();
    return true;
  }

  // Weighted contention estimate: passive (scheduler) waits count ten
  // times as much as active spins.
  u64 Contention() const {
    u64 active = iter_ % kActiveSpinIters;
    u64 passive = iter_ - active;
    return active + passive * 10;
  }

 private:
  int iter_;
  static const int kActiveSpinIters = 10;
  static const int kActiveSpinCnt = 20;
};
// Construct an unlocked mutex. `type` feeds the deadlock detector in
// debug builds; `stat_type` is the counter bumped on contended
// acquisitions when TSAN_COLLECT_STATS is enabled.
Mutex::Mutex(MutexType type, StatType stat_type) {
  CHECK_GT(type, MutexTypeInvalid);
  CHECK_LT(type, MutexTypeCount);
#if TSAN_DEBUG
  type_ = type;
#endif
#if TSAN_COLLECT_STATS
  stat_type_ = stat_type;
#endif
  atomic_store(&state_, kUnlocked, memory_order_relaxed);
}
Mutex::~Mutex() {
  // A mutex must not be destroyed while read- or write-held.
  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
}
void Mutex::Lock() {
#if TSAN_DEBUG && !TSAN_GO
cur_thread()->deadlock_detector.Lock(type_);
#endif
uptr cmp = kUnlocked;
if (atomic_compare_exchange_strong(&state_, &cmp, kWriteLock,
memory_order_acquire))
return;
for (Backoff backoff; backoff.Do();) {
if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
cmp = kUnlocked;
if (atomic_compare_exchange_weak(&state_, &cmp, kWriteLock,
memory_order_acquire)) {
#if TSAN_COLLECT_STATS
StatInc(cur_thread(), stat_type_, backoff.Contention());
#endif<|fim▁hole|> }
}
}
// Release a write lock by clearing the writer bit with release ordering.
void Mutex::Unlock() {
  uptr prev = atomic_fetch_sub(&state_, kWriteLock, memory_order_release);
  (void)prev;
  DCHECK_NE(prev & kWriteLock, 0);  // must have been write-locked
#if TSAN_DEBUG && !TSAN_GO
  cur_thread()->deadlock_detector.Unlock(type_);
#endif
}
// Acquire a shared (reader) lock. The reader count is registered
// optimistically; if a writer holds the lock, spin with backoff until
// the writer bit clears (the reader increment stays in place meanwhile).
void Mutex::ReadLock() {
#if TSAN_DEBUG && !TSAN_GO
  cur_thread()->deadlock_detector.Lock(type_);
#endif
  uptr prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);
  if ((prev & kWriteLock) == 0)
    return;  // fast path: no writer active
  for (Backoff backoff; backoff.Do();) {
    prev = atomic_load(&state_, memory_order_acquire);
    if ((prev & kWriteLock) == 0) {
#if TSAN_COLLECT_STATS
      StatInc(cur_thread(), stat_type_, backoff.Contention());
#endif
      return;
    }
  }
}
// Release a shared (reader) lock by decrementing the reader count.
void Mutex::ReadUnlock() {
  uptr prev = atomic_fetch_sub(&state_, kReadLock, memory_order_release);
  (void)prev;
  DCHECK_EQ(prev & kWriteLock, 0);   // no writer may be active
  DCHECK_GT(prev & ~kWriteLock, 0);  // at least one reader was registered
#if TSAN_DEBUG && !TSAN_GO
  cur_thread()->deadlock_detector.Unlock(type_);
#endif
}
// Debug helper: assert that the mutex is held (read or write).
void Mutex::CheckLocked() {
  CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
}
} // namespace __tsan<|fim▁end|> | return;
} |
<|file_name|>mp3file.cpp<|end_file_name|><|fim▁begin|>/* id3ted: mp3file.cpp
* Copyright (c) 2011 Bert Muennich <be.muennich at googlemail.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include <iostream>
#include <sstream>
#include <cctype>
#include <cstdio>
#include <cstring>
#include <taglib/id3v1tag.h>
#include <taglib/id3v1genres.h>
#include <taglib/id3v2tag.h>
#include <taglib/attachedpictureframe.h>
#include <taglib/commentsframe.h>
#include <taglib/textidentificationframe.h>
#include <taglib/unsynchronizedlyricsframe.h>
#include <taglib/urllinkframe.h>
#include "mp3file.h"
#include "fileio.h"
#include "frametable.h"
// Open `filename` and load its ID3 tags. `_tags` is a bitmask of tag
// versions to write (1 = ID3v1, 2 = ID3v2); 0 means "keep whatever the
// file already has", defaulting to ID3v2 if the file has no tags at all.
// When `lame` is set, a LameTag is additionally parsed from the first
// MPEG frame.
MP3File::MP3File(const char *filename, int _tags, bool lame) :
		file(filename), id3Tag(NULL), id3v1Tag(NULL), id3v2Tag(NULL),
		lameTag(NULL), tags(_tags) {
	if (file.isValid()) {
		id3v1Tag = file.ID3v1Tag(tags & 1);
		id3v2Tag = file.ID3v2Tag(tags & 2);
		id3Tag = file.tag();
		if (tags == 0) {
			// tag version to write not given on command line
			// -> write only the tags already in the file
			if (id3v1Tag != NULL && !id3v1Tag->isEmpty())
				tags |= 1;
			if (id3v2Tag != NULL && !id3v2Tag->isEmpty())
				tags |= 2;
			if (tags == 0) {
				// no tags found -> use version 2 as default
				tags = 2;
				id3v2Tag = file.ID3v2Tag(true);
			}
		}
		if (lame) {
			// The first audio frame starts after the ID3v2 tag, if any.
			long frameOffset, frameLength;
			if (id3v2Tag != NULL && !id3v2Tag->isEmpty())
				frameOffset = file.firstFrameOffset();
			else
				frameOffset = file.nextFrameOffset(0);
			frameLength = file.nextFrameOffset(frameOffset + 1) - frameOffset;
			lameTag = new LameTag(filename, frameOffset, frameLength);
		}
	}
}
MP3File::~MP3File() {
	// lameTag is the only heap resource owned by this object; the tag
	// pointers are owned by `file` (TagLib) and must not be deleted here.
	if (lameTag != NULL)
		delete lameTag;
}
// True if a valid LAME header tag was parsed from the first MPEG frame.
bool MP3File::hasLameTag() const {
	if (lameTag == NULL)
		return false;
	return lameTag->isValid();
}
// True if the file carries a non-empty ID3v1 tag.
bool MP3File::hasID3v1Tag() const {
	return !(id3v1Tag == NULL || id3v1Tag->isEmpty());
}
// True if the file carries a non-empty ID3v2 tag.
// Fix: the NULL guard previously tested id3v1Tag while dereferencing
// id3v2Tag, which could dereference NULL (or give a wrong answer) when
// only one of the two tags exists.
bool MP3File::hasID3v2Tag() const {
	return id3v2Tag != NULL && !id3v2Tag->isEmpty();
}
// Apply a generic (version-independent) tag field to the file's tags.
// info->id() selects the field: 'a' artist, 'A' album, 't' title,
// 'c' comment, 'g' genre, 'T' track, 'y' year. No-op on NULL info,
// invalid or read-only files.
void MP3File::apply(GenericInfo *info) {
	if (info == NULL)
		return;
	if (!file.isValid() || file.readOnly())
		return;
	if (id3Tag == NULL) {
		id3Tag = file.tag();
		if (id3Tag == NULL)
			return;
	}
	switch(info->id()) {
		case 'a': {
			id3Tag->setArtist(info->value());
			break;
		}
		case 'A': {
			id3Tag->setAlbum(info->value());
			break;
		}
		case 't': {
			id3Tag->setTitle(info->value());
			break;
		}
		case 'c': {
			id3Tag->setComment(info->value());
			break;
		}
		case 'g': {
			id3Tag->setGenre(info->value());
			break;
		}
		case 'T': {
			// Track may come as "track/total"; ID3v1 can only store the
			// plain track number, ID3v2 keeps the full string in TRCK.
			if (tags & 1) {
				int slash = info->value().find('/', 0);
				if (slash < 0)
					slash = info->value().length();
				id3Tag->setTrack(info->value().substr(0, slash).toInt());
			}
			if (tags & 2) {
				FrameInfo trackInfo(FrameTable::textFrameID(FID3_TRCK),
						FID3_TRCK, info->value().toCString(USE_UTF8));
				apply(&trackInfo);
			}
			break;
		}
		case 'y': {
			id3Tag->setYear(info->value().toInt());
			break;
		}
	}
}
void MP3File::apply(FrameInfo *info) {
if (!file.isValid() || file.readOnly())
return;
if (id3v2Tag == NULL || info == NULL)
return;
vector<ID3v2::Frame*> frameList = find(info);
vector<ID3v2::Frame*>::iterator eachFrame = frameList.begin();
if (info->text().isEmpty() && info->fid() != FID3_APIC) {
if (!frameList.empty()) {
for (; eachFrame != frameList.end(); ++eachFrame)
id3v2Tag->removeFrame(*eachFrame);
}
} else {
if (frameList.empty() || info->fid() == FID3_APIC) {
switch (info->fid()) {
case FID3_APIC: {
ID3v2::AttachedPictureFrame *apic;
for (; eachFrame != frameList.end(); ++eachFrame) {
apic = dynamic_cast<ID3v2::AttachedPictureFrame*>(*eachFrame);
if (apic != NULL && apic->picture() == info->data())
return;
}
apic = new ID3v2::AttachedPictureFrame();
apic->setMimeType(info->description());
apic->setType(ID3v2::AttachedPictureFrame::FrontCover);
apic->setPicture(info->data());
id3v2Tag->addFrame(apic);
break;
}
case FID3_COMM: {
if (info->text().isEmpty())
return;
ID3v2::CommentsFrame *comment = new ID3v2::CommentsFrame(DEF_TSTR_ENC);
comment->setText(info->text());<|fim▁hole|> break;
}
case FID3_TXXX: {
ID3v2::UserTextIdentificationFrame *userText =
new ID3v2::UserTextIdentificationFrame(DEF_TSTR_ENC);
userText->setText(info->text());
userText->setDescription(info->description());
id3v2Tag->addFrame(userText);
break;
}
case FID3_USLT: {
ID3v2::UnsynchronizedLyricsFrame *lyrics =
new ID3v2::UnsynchronizedLyricsFrame(DEF_TSTR_ENC);
lyrics->setText(info->text());
lyrics->setDescription(info->description());
lyrics->setLanguage(info->language());
id3v2Tag->addFrame(lyrics);
break;
}
case FID3_WCOM:
case FID3_WCOP:
case FID3_WOAF:
case FID3_WOAR:
case FID3_WOAS:
case FID3_WORS:
case FID3_WPAY:
case FID3_WPUB: {
ID3v2::UrlLinkFrame *urlLink = new ID3v2::UrlLinkFrame(info->id());
urlLink->setUrl(info->text());
id3v2Tag->addFrame(urlLink);
break;
}
case FID3_WXXX: {
ID3v2::UserUrlLinkFrame *userUrl =
new ID3v2::UserUrlLinkFrame(DEF_TSTR_ENC);
userUrl->setUrl(info->text());
userUrl->setDescription(info->description());
id3v2Tag->addFrame(userUrl);
break;
}
default: {
ID3v2::TextIdentificationFrame *textFrame =
new ID3v2::TextIdentificationFrame(info->id(), DEF_TSTR_ENC);
textFrame->setText(info->text());
id3v2Tag->addFrame(textFrame);
break;
}
}
} else {
frameList.front()->setText(info->text());
}
}
}
// Apply a single field extracted by filename pattern matching.
// Generic fields are routed through apply(GenericInfo*); 'd' (disc
// number) maps to the ID3v2 TPOS frame. No-op on empty values or
// invalid/read-only files.
void MP3File::apply(const MatchInfo &info) {
	if (!file.isValid() || file.readOnly())
		return;
	if (info.id == 0 || info.text.length() == 0)
		return;
	switch (info.id) {
		case 'a':
		case 'A':
		case 't':
		case 'c':
		case 'g':
		case 'T':
		case 'y': {
			GenericInfo genInfo(info.id, info.text.c_str());
			apply(&genInfo);
			break;
		}
		case 'd': {
			// disc number is ID3v2-only (TPOS frame)
			if (id3v2Tag != NULL) {
				FrameInfo frameInfo("TPOS", FID3_TPOS, info.text.c_str());
				apply(&frameInfo);
			}
			break;
		}
	}
}
// Fills info.text with the tag value selected by the pattern specifier in
// info.id ('a' artist, 'A' album, 't' title, 'g' genre, 'y' year, 'T' track,
// 'd' disc number read from the ID3v2 TPOS frame). Unknown artist/album/title
// get placeholder strings; numeric fields stay empty when the tag value is 0.
void MP3File::fill(MatchInfo &info) {
    string &text = info.text;
    ostringstream tmp;
    tmp.fill('0');    // zero-pads the track number written below
    if (!file.isValid())
        return;
    if (info.id == 0)
        return;
    switch (info.id) {
    case 'a':
        text = id3Tag->artist().toCString(USE_UTF8);
        if (text.empty())
            text = "Unknown Artist";
        break;
    case 'A':
        text = id3Tag->album().toCString(USE_UTF8);
        if (text.empty())
            text = "Unknown Album";
        break;
    case 't':
        text = id3Tag->title().toCString(USE_UTF8);
        if (text.empty())
            text = "Unknown Title";
        break;
    case 'g':
        text = id3Tag->genre().toCString(USE_UTF8);
        break;
    case 'y': {
        uint year = id3Tag->year();
        if (year) {
            tmp << year;
            text = tmp.str();
        }
        break;
    }
    case 'T': {
        uint track = id3Tag->track();
        if (track) {
            tmp.width(2);    // "01", "02", ... via the '0' fill set above
            tmp << track;
            text = tmp.str();
        }
        break;
    }
    case 'd': {
        // Disc number lives in the ID3v2 TPOS frame only.
        if (id3v2Tag != NULL) {
            ID3v2::FrameList list = id3v2Tag->frameListMap()["TPOS"];
            if (!list.isEmpty()) {
                uint disc = list.front()->toString().toInt();
                if (disc) {
                    tmp << disc;
                    text = tmp.str();
                }
            }
        }
        break;
    }
    }
}
// Removes every ID3v2 frame whose four-character frame ID matches textFID.
// A no-op for NULL IDs, invalid/read-only files, or files without a v2 tag.
void MP3File::removeFrames(const char *textFID) {
    if (textFID == NULL || !file.isValid() || file.readOnly())
        return;
    if (id3v2Tag == NULL)
        return;
    id3v2Tag->removeFrames(textFID);
}
// Writes the selected tags back to disk. Returns false for invalid or
// read-only files, otherwise the result of the underlying TagLib save.
bool MP3File::save() {
    if (!file.isValid() || file.readOnly())
        return false;
    // bug in TagLib 1.5.0?: deleting solely frame in id3v2 tag and
    // then saving file causes the recovery of the last deleted frame.
    // solution: strip the whole tag if it is empty before writing file!
    if (tags & 2 && id3v2Tag != NULL && id3v2Tag->isEmpty())
        strip(2);
    return file.save(tags, false);
}
// Strips the tag types given by the bit mask from the file.
// Returns false when the file is invalid or opened read-only.
bool MP3File::strip(int tags) {
    if (file.isValid() && !file.readOnly())
        return file.strip(tags);
    return false;
}
// Prints the MPEG audio properties of the file: version, layer, channel
// mode, bitrate, sample rate and the running time as hh:mm:ss.
void MP3File::showInfo() const {
    MPEG::Properties *properties;
    const char *version;
    const char *channelMode;

    if (!file.isValid())
        return;
    if ((properties = file.audioProperties()) == NULL)
        return;

    // TagLib's MPEG::Header::Version: 0 = MPEG 1, 1 = MPEG 2, 2 = MPEG 2.5.
    switch (properties->version()) {
    case 1:
        version = "2";
        break;
    case 2:
        version = "2.5";
        break;
    default:
        version = "1";
        break;
    }

    switch (properties->channelMode()) {
    case 0:
        channelMode = "Stereo";
        break;
    case 1:
        channelMode = "JointStereo";
        break;
    case 2:
        channelMode = "DualChannel";
        break;
    default:
        channelMode = "SingleChannel";
        break;
    }

    int length = properties->length();
    printf("MPEG %s Layer %d %s\n", version, properties->layer(), channelMode);
    // Fixed: minutes must be the remainder after hours ((length % 3600) / 60),
    // not the total minute count (length / 60), or e.g. 3661 s prints 01:61:01.
    printf("bitrate: %d kBit/s, sample rate: %d Hz, length: %02d:%02d:%02d\n",
           properties->bitrate(), properties->sampleRate(),
           length / 3600, (length % 3600) / 60, length % 60);
}
// Prints the LAME encoder tag if the file carries one; optionally also
// verifies the tag's CRC checksums.
void MP3File::printLameTag(bool checkCRC) const {
    if (!file.isValid())
        return;
    if (lameTag != NULL)
        lameTag->print(checkCRC);
}
// Pretty-prints the ID3v1 tag (title/track, artist/year, album/genre,
// comment). Silently returns when the file is invalid or has no v1 tag.
void MP3File::listID3v1Tag() const {
    if (!file.isValid())
        return;
    if (id3v1Tag == NULL || id3v1Tag->isEmpty())
        return;
    int year = id3v1Tag->year();
    TagLib::String genreStr = id3v1Tag->genre();
    // ID3v1 genres are a fixed table; index 255 means "unset/unknown".
    int genre = ID3v1::genreIndex(genreStr);
    printf("ID3v1:\n");
    printf("Title  : %-30s  Track: %d\n",
           id3v1Tag->title().toCString(USE_UTF8), id3v1Tag->track());
    printf("Artist : %-30s  Year : %-4s\n",
           id3v1Tag->artist().toCString(USE_UTF8),
           (year != 0 ? TagLib::String::number(year).toCString() : ""));
    printf("Album  : %-30s  Genre: %s (%d)\n",
           id3v1Tag->album().toCString(USE_UTF8),
           (genre == 255 ? "Unknown" : genreStr.toCString()), genre);
    printf("Comment: %s\n", id3v1Tag->comment().toCString(USE_UTF8));
}
// Lists every frame of the ID3v2 tag, one line per frame, with
// frame-type-specific rendering (pictures, comments, genre, lyrics,
// user text/URL frames). withDesc additionally prints the human-readable
// frame description after the four-letter frame ID.
void MP3File::listID3v2Tag(bool withDesc) const {
    if (!file.isValid())
        return;
    if (id3v2Tag == NULL || id3v2Tag->isEmpty())
        return;
    int frameCount = id3v2Tag->frameList().size();
    cout << "ID3v2." << id3v2Tag->header()->majorVersion() << " - "
         << frameCount << (frameCount != 1 ? " frames:" : " frame:") << endl;
    ID3v2::FrameList::ConstIterator frame = id3v2Tag->frameList().begin();
    for (; frame != id3v2Tag->frameList().end(); ++frame) {
        String textFID((*frame)->frameID(), DEF_TSTR_ENC);
        cout << textFID;
        if (withDesc)
            cout << " (" << FrameTable::frameDescription(textFID) << ")";
        cout << ": ";
        switch (FrameTable::frameID(textFID)) {
        case FID3_APIC: {
            // Attached picture: show mime type and human-readable size
            // instead of dumping the binary image data.
            ID3v2::AttachedPictureFrame *apic =
                    dynamic_cast<ID3v2::AttachedPictureFrame*>(*frame);
            if (apic != NULL) {
                int size = apic->picture().size();
                cout << apic->mimeType() << ", " << FileIO::sizeHumanReadable(size);
            }
            break;
        }
        case FID3_COMM: {
            // Comment: "[description](lang): text"; a malformed language
            // field is rendered as the ID3 placeholder "XXX".
            ID3v2::CommentsFrame *comment =
                    dynamic_cast<ID3v2::CommentsFrame*>(*frame);
            if (comment != NULL) {
                TagLib::ByteVector lang = comment->language();
                bool showLanguage = lang.size() == 3 && isalpha(lang[0]) &&
                        isalpha(lang[1]) && isalpha(lang[2]);
                cout << "[" << comment->description().toCString(USE_UTF8) << "]";
                if (showLanguage)
                    cout << "(" << lang[0] << lang[1] << lang[2];
                else
                    cout << "(XXX";
                cout << "): " << comment->toString().toCString(USE_UTF8);
            }
            break;
        }
        case FID3_TCON: {
            // Genre: accept "(NN)" and plain "NN" numeric forms and map
            // them back to the ID3v1 genre name; 255 means "not numeric".
            String genreStr = (*frame)->toString();
            int genre = 255;
            sscanf(genreStr.toCString(), "(%d)", &genre);
            if (genre == 255)
                sscanf(genreStr.toCString(), "%d", &genre);
            if (genre != 255)
                genreStr = ID3v1::genre(genre);
            cout << genreStr;
            break;
        }
        case FID3_USLT: {
            // Unsynchronized lyrics: print indented, normalizing both CR
            // and LF line breaks to newline + indent.
            ID3v2::UnsynchronizedLyricsFrame *lyrics =
                    dynamic_cast<ID3v2::UnsynchronizedLyricsFrame*>(*frame);
            if (lyrics != NULL) {
                const char *text = lyrics->text().toCString(USE_UTF8);
                const char *indent = "    ";
                TagLib::ByteVector lang = lyrics->language();
                bool showLanguage = lang.size() == 3 && isalpha(lang[0]) &&
                        isalpha(lang[1]) && isalpha(lang[2]);
                cout << "[" << lyrics->description().toCString(USE_UTF8) << "]";
                if (showLanguage)
                    cout << "(" << lang[0] << lang[1] << lang[2];
                else
                    cout << "(XXX";
                cout << "):\n" << indent;
                while (*text != '\0') {
                    if (*text == (char) 10 || *text == (char) 13)
                        cout << "\n" << indent;
                    else
                        putchar(*text);
                    ++text;
                }
            }
            break;
        }
        case FID3_TXXX: {
            // User text frame: fieldList()[0] is the description,
            // fieldList()[1] the actual value.
            ID3v2::UserTextIdentificationFrame *userText =
                    dynamic_cast<ID3v2::UserTextIdentificationFrame*>(*frame);
            if (userText != NULL) {
                StringList textList = userText->fieldList();
                cout << "[" << userText->description().toCString(USE_UTF8)
                     << "]: ";
                if (textList.size() > 1)
                    cout << textList[1].toCString(USE_UTF8);
            }
            break;
        }
        case FID3_WXXX: {
            ID3v2::UserUrlLinkFrame *userUrl =
                    dynamic_cast<ID3v2::UserUrlLinkFrame*>(*frame);
            if (userUrl != NULL)
                cout << "[" << userUrl->description().toCString(USE_UTF8)
                     << "]: " << userUrl->url().toCString(USE_UTF8);
            break;
        }
        case FID3_XXXX: {
            // Unknown/experimental frame ID: print nothing after the ID.
            break;
        }
        default:
            cout << (*frame)->toString().toCString(USE_UTF8);
            break;
        }
        cout << endl;
    }
}
// Extracts every attached picture (APIC frame) into files named
// "<mp3name>.apic-NN.<ext>", deriving the extension from the mime type
// (falling back to "bin"). Existing files are only overwritten when
// overwrite is set or the user confirms interactively.
void MP3File::extractAPICs(bool overwrite) const {
    if (!file.isValid() || id3v2Tag == NULL)
        return;
    int num = 0;
    const char *mimetype, *filetype;
    ostringstream filename;
    ID3v2::FrameList apicList = id3v2Tag->frameListMap()["APIC"];
    ID3v2::FrameList::ConstIterator each = apicList.begin();
    for (; each != apicList.end(); ++each) {
        ID3v2::AttachedPictureFrame *apic =
                dynamic_cast<ID3v2::AttachedPictureFrame*>(*each);
        if (apic == NULL)
            continue;
        // Derive the file extension: the part after '/' in "image/jpeg",
        // the whole mime string if it has no slash, or "bin" if empty.
        mimetype = apic->mimeType().toCString();
        if (mimetype != NULL && strlen(mimetype) > 0) {
            filetype = strrchr(mimetype, '/');
            if (filetype != NULL && strlen(filetype+1) > 0)
                ++filetype;
            else
                filetype = mimetype;
        } else {
            filetype = "bin";
        }
        // Two-digit zero-padded counter keeps the extracted files sorted.
        filename.str("");
        filename << file.name() << ".apic-" << (++num < 10 ? "0" : "");
        filename << num << "." << filetype;
        if (FileIO::exists(filename.str().c_str())) {
            if (!overwrite && !FileIO::confirmOverwrite(filename.str().c_str()))
                continue;
        }
        OFile outFile(filename.str().c_str());
        if (!outFile.isOpen())
            continue;
        outFile.write(apic->picture());
        if (outFile.error())
            warn("%s: Could not write file", filename.str().c_str());
        outFile.close();
    }
}
// Returns all ID3v2 frames of the type given by info->id() that match the
// identifying fields of info (description/language for COMM and USLT,
// description for TXXX/WXXX, picture data + mime for APIC). For frame
// types without identifying fields, every frame of that ID matches.
vector<ID3v2::Frame*> MP3File::find(FrameInfo *info) {
    vector<ID3v2::Frame*> list;
    if (id3v2Tag == NULL || info == NULL)
        return list;
    ID3v2::FrameList frameList = id3v2Tag->frameListMap()[info->id()];
    ID3v2::FrameList::ConstIterator each = frameList.begin();
    for (; each != frameList.end(); ++each) {
        switch (FrameTable::frameID((*each)->frameID())) {
        case FID3_APIC: {
            ID3v2::AttachedPictureFrame *apic =
                    dynamic_cast<ID3v2::AttachedPictureFrame*>(*each);
            if (apic == NULL)
                continue;
            // For pictures the description field carries the mime type.
            if (info->data() == apic->picture() &&
                    info->description() == apic->mimeType())
                list.push_back(*each);
            break;
        }
        case FID3_COMM: {
            ID3v2::CommentsFrame *comment =
                    dynamic_cast<ID3v2::CommentsFrame*>(*each);
            if (comment == NULL)
                continue;
            // NOTE: side effect — an unset language is normalized to the
            // ID3 placeholder "XXX" so the comparison below can match it.
            if (comment->language().isEmpty())
                comment->setLanguage("XXX");
            if (info->description() == comment->description() &&
                    info->language() == comment->language())
                list.push_back(*each);
            break;
        }
        case FID3_TXXX: {
            ID3v2::UserTextIdentificationFrame *userText =
                    dynamic_cast<ID3v2::UserTextIdentificationFrame*>(*each);
            if (userText == NULL)
                continue;
            if (info->description() == userText->description())
                list.push_back(*each);
            break;
        }
        case FID3_USLT: {
            ID3v2::UnsynchronizedLyricsFrame *lyrics =
                    dynamic_cast<ID3v2::UnsynchronizedLyricsFrame*>(*each);
            if (lyrics == NULL)
                continue;
            // Same "XXX" normalization as for comment frames above.
            if (lyrics->language().isEmpty())
                lyrics->setLanguage("XXX");
            if (info->description() == lyrics->description() &&
                    info->language() == lyrics->language())
                list.push_back(*each);
            break;
        }
        case FID3_WXXX: {
            ID3v2::UserUrlLinkFrame *userUrl =
                    dynamic_cast<ID3v2::UserUrlLinkFrame*>(*each);
            if (userUrl == NULL)
                continue;
            if (info->description() == userUrl->description())
                list.push_back(*each);
            break;
        }
        default:
            // Frame types without identifying sub-fields: any frame of
            // this ID is considered a match.
            list.push_back(*each);
            break;
        }
    }
    return list;
}
comment->setLanguage(info->language());
id3v2Tag->addFrame(comment); |
<|file_name|>dic.service.ts<|end_file_name|><|fim▁begin|>import {Service} from './index';
/** Service wrapper for the `getDicInfo` endpoint (lists all dictionaries). */
export class GetDicInfoService extends Service {
    constructor(http) {
        super(http);
    }

    /** Resolves with the `body` of the JSON response from `getDicInfo`. */
    get(): Promise<any> {
        const request = this.post('getDicInfo', {});
        return request
            .then((response) => response.json().body)
            .catch(this.handleError);
    }
}
/** Service wrapper for the `getDicItem` endpoint (items of one dictionary). */
export class GetDicItemService extends Service {
    constructor(http) {
        super(http);
    }

    /** Resolves with the `body` of the JSON response for the given dicId. */
    get(dicId): Promise<any> {
        const request = this.post('getDicItem', { dicId });
        return request
            .then((response) => response.json().body)
            .catch(this.handleError);
    }
}
/** Service wrapper for the `editDicItem` endpoint. */
export class EditDicItemService extends Service {
    constructor(http) {
        super(http);
    }

    /** Resolves with the `body` of the JSON response from `editDicItem`. */
    get(): Promise<any> {
        const request = this.post('editDicItem', {});
        return request
            .then((response) => response.json().body)
            .catch(this.handleError);
    }
}
/** Service wrapper for the `addDicItem` endpoint. */
export class AddDicItemService extends Service {
    constructor(http) {
        super(http);
    }

    /** Resolves with the `body` of the JSON response from `addDicItem`. */
    get(): Promise<any> {
        const request = this.post('addDicItem', {});
        return request
            .then((response) => response.json().body)
            .catch(this.handleError);
    }
}
export class DelDicItemService extends Service{
constructor(http) {<|fim▁hole|> super(http);
}
get(dictItemId:string): Promise<any> {
return this.post('delDicItem', {dictItemId})
.then(response => response.json().body)
.catch(this.handleError)
}
}<|fim▁end|> | |
<|file_name|>target_machine.rs<|end_file_name|><|fim▁begin|>//! Target machine information, to generate assembly or object files.
use super::prelude::*;
use super::target::LLVMTargetDataRef;
/// Opaque LLVM target machine; only ever used behind a raw pointer.
#[repr(C)]
pub struct LLVMOpaqueTargetMachine;

pub type LLVMTargetMachineRef = *mut LLVMOpaqueTargetMachine;

/// Opaque LLVM target description; only ever used behind a raw pointer.
#[repr(C)]
pub struct LLVMTarget;

pub type LLVMTargetRef = *mut LLVMTarget;

/// Code-generation optimization level (mirrors LLVMCodeGenOptLevel in C).
#[repr(C)]
pub enum LLVMCodeGenOptLevel {
    LLVMCodeGenLevelNone = 0,
    LLVMCodeGenLevelLess = 1,
    LLVMCodeGenLevelDefault = 2,
    LLVMCodeGenLevelAggressive = 3
}

/// Relocation model for generated code (mirrors LLVMRelocMode in C).
#[repr(C)]
pub enum LLVMRelocMode {
    LLVMRelocDefault = 0,
    LLVMRelocStatic = 1,
    LLVMRelocPIC = 2,
    LLVMRelocDynamicNoPic = 3,
}

/// Code model for generated code (mirrors LLVMCodeModel in C).
#[repr(C)]
#[derive(Copy, Clone)]
pub enum LLVMCodeModel {
    LLVMCodeModelDefault = 0,
    LLVMCodeModelJITDefault = 1,
    LLVMCodeModelSmall = 2,
    LLVMCodeModelKernel = 3,
    LLVMCodeModelMedium = 4,
    LLVMCodeModelLarge = 5,
}

/// Output kind for LLVMTargetMachineEmitTo{File,MemoryBuffer}.
#[repr(C)]
pub enum LLVMCodeGenFileType {
    LLVMAssemblyFile = 0,
    LLVMObjectFile = 1,
}
extern "C" {
pub fn LLVMGetFirstTarget() -> LLVMTargetRef;
pub fn LLVMGetNextTarget(T: LLVMTargetRef) -> LLVMTargetRef;
pub fn LLVMGetTargetFromName(Name: *const ::libc::c_char)
-> LLVMTargetRef;<|fim▁hole|> pub fn LLVMGetTargetName(T: LLVMTargetRef) -> *const ::libc::c_char;
pub fn LLVMGetTargetDescription(T: LLVMTargetRef)
-> *const ::libc::c_char;
pub fn LLVMTargetHasJIT(T: LLVMTargetRef) -> LLVMBool;
pub fn LLVMTargetHasTargetMachine(T: LLVMTargetRef) -> LLVMBool;
pub fn LLVMTargetHasAsmBackend(T: LLVMTargetRef) -> LLVMBool;
pub fn LLVMCreateTargetMachine(T: LLVMTargetRef,
Triple: *const ::libc::c_char,
CPU: *const ::libc::c_char,
Features: *const ::libc::c_char,
Level: LLVMCodeGenOptLevel,
Reloc: LLVMRelocMode,
CodeModel: LLVMCodeModel)
-> LLVMTargetMachineRef;
pub fn LLVMDisposeTargetMachine(T: LLVMTargetMachineRef) -> ();
pub fn LLVMGetTargetMachineTarget(T: LLVMTargetMachineRef)
-> LLVMTargetRef;
pub fn LLVMGetTargetMachineTriple(T: LLVMTargetMachineRef)
-> *mut ::libc::c_char;
pub fn LLVMGetTargetMachineCPU(T: LLVMTargetMachineRef)
-> *mut ::libc::c_char;
pub fn LLVMGetTargetMachineFeatureString(T: LLVMTargetMachineRef)
-> *mut ::libc::c_char;
pub fn LLVMGetTargetMachineData(T: LLVMTargetMachineRef)
-> LLVMTargetDataRef;
pub fn LLVMSetTargetMachineAsmVerbosity(T: LLVMTargetMachineRef,
VerboseAsm: LLVMBool) -> ();
pub fn LLVMTargetMachineEmitToFile(T: LLVMTargetMachineRef,
M: LLVMModuleRef,
Filename: *mut ::libc::c_char,
codegen: LLVMCodeGenFileType,
ErrorMessage: *mut *mut ::libc::c_char)
-> LLVMBool;
pub fn LLVMTargetMachineEmitToMemoryBuffer(T: LLVMTargetMachineRef,
M: LLVMModuleRef,
codegen: LLVMCodeGenFileType,
ErrorMessage:
*mut *mut ::libc::c_char,
OutMemBuf:
*mut LLVMMemoryBufferRef)
-> LLVMBool;
pub fn LLVMGetDefaultTargetTriple() -> *mut ::libc::c_char;
pub fn LLVMAddAnalysisPasses(T: LLVMTargetMachineRef,
PM: LLVMPassManagerRef) -> ();
}<|fim▁end|> | pub fn LLVMGetTargetFromTriple(Triple: *const ::libc::c_char,
T: *mut LLVMTargetRef,
ErrorMessage: *mut *mut ::libc::c_char)
-> LLVMBool; |
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigNumber, ConfigSelection, ConfigSelectionNumber, getConfigListEntry
from Plugins.Plugin import PluginDescriptor
from enigma import setAnimation_current, setAnimation_speed, setAnimation_current_listbox
g_animation_paused = False
g_orig_show = None
g_orig_doClose = None
config.misc.window_animation_default = ConfigNumber(default = 6)
config.misc.window_animation_speed = ConfigSelectionNumber(1, 30, 1, default = 20)
config.misc.listbox_animation_default = ConfigSelection(default = "0", choices = [("0", _("Disable")), ("1", _("Enable")), ("2", _("Same behavior as current animation"))])
class AnimationSetupConfig(ConfigListScreen, Screen):
	"""Settings dialog for animation speed and listbox (focus) animation.

	Green/OK saves and applies the values, red/cancel reverts them,
	yellow restores the defaults without saving.
	"""
	skin = """
		<screen position="center,center" size="600,140" title="Animation Settings">
			<widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
			<ePixmap pixmap="skin_default/buttons/red.png" position="0,100" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="140,100" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,100" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,100" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget source="key_green" render="Label" position="140,100" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget source="key_yellow" render="Label" position="280,100" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
		</screen>
		"""

	def __init__(self, session):
		self.session = session
		self.entrylist = []
		Screen.__init__(self, session)
		ConfigListScreen.__init__(self, self.entrylist)
		self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
			"ok" : self.keyGreen,
			"green" : self.keyGreen,
			"yellow" : self.keyYellow,
			"red" : self.keyRed,
			"cancel" : self.keyRed,
			}, -2)
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Save"))
		self["key_yellow"] = StaticText(_("Default"))
		self.makeConfigList()
		self.onLayoutFinish.append(self.layoutFinished)

	def layoutFinished(self):
		self.setTitle(_('Animation Setup'))

	def keyGreen(self):
		# Persist and apply both settings.
		# (Fixed: setAnimation_speed was previously called twice in a row.)
		config.misc.window_animation_speed.save()
		setAnimation_speed(int(config.misc.window_animation_speed.value))
		config.misc.listbox_animation_default.save()
		setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
		self.close()

	def keyRed(self):
		# Discard any unsaved changes.
		config.misc.window_animation_speed.cancel()
		config.misc.listbox_animation_default.cancel()
		self.close()

	def keyYellow(self):
		# Restore defaults in the UI only; green is still required to save.
		config.misc.window_animation_speed.value = 20
		config.misc.listbox_animation_default.value = "0"
		self.makeConfigList()

	def keyLeft(self):
		ConfigListScreen.keyLeft(self)

	def keyRight(self):
		ConfigListScreen.keyRight(self)

	def makeConfigList(self):
		# (Re)build the config list shown by the "config" widget.
		self.entrylist = []
		self.entrylist.append(getConfigListEntry(_("Animation Speed"), config.misc.window_animation_speed))
		self.entrylist.append(getConfigListEntry(_("Enable Focus Animation"), config.misc.listbox_animation_default))
		self["config"].list = self.entrylist
		self["config"].l.setList(self.entrylist)
class AnimationSetupScreen(Screen):
	"""Screen that lets the user pick one of the built-in window animations.

	OK/green saves the selection, yellow opens AnimationSetupConfig,
	blue previews the highlighted animation via a timed MessageBox.
	"""
	# Mapping between the animation index used by setAnimation_current()
	# and its human-readable name.
	animationSetupItems = [
		{"idx":0, "name":_("Disable Animations")},
		{"idx":1, "name":_("Simple fade")},
		{"idx":2, "name":_("Grow drop")},
		{"idx":3, "name":_("Grow from left")},
		{"idx":4, "name":_("Popup")},
		{"idx":5, "name":_("Slide drop")},
		{"idx":6, "name":_("Slide left to right")},
		{"idx":7, "name":_("Slide top to bottom")},
		{"idx":8, "name":_("Stripes")},
		]

	skin = """
		<screen name="AnimationSetup" position="center,center" size="580,400" title="Animation Setup">
			<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
			<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
			<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" \
				font="Regular;20" halign="center" valign="center" transparent="1" />
			<widget name="list" position="10,60" size="560,364" scrollbarMode="showOnDemand" />
			<widget source="introduction" render="Label" position="0,370" size="560,40" \
				font="Regular;20" valign="center" transparent="1" />
		</screen>
		"""

	def __init__(self, session):
		self.skin = AnimationSetupScreen.skin
		Screen.__init__(self, session)

		self.animationList = []
		self["introduction"] = StaticText(_("* current animation"))
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("Save"))
		self["key_yellow"] = StaticText(_("Settings"))
		self["key_blue"] = StaticText(_("Preview"))

		self["actions"] = ActionMap(["SetupActions", "ColorActions"],
		{
			"cancel": self.keyclose,
			"save": self.ok,
			"ok" : self.ok,
			"yellow": self.config,
			"blue": self.preview
		}, -3)

		self["list"] = MenuList(self.animationList)
		self.onLayoutFinish.append(self.layoutFinished)

	def layoutFinished(self):
		# Build the menu entries, prefixing the currently active one with '*'.
		l = []
		for x in self.animationSetupItems:
			key = x.get("idx", 0)
			name = x.get("name", "??")
			if key == config.misc.window_animation_default.value:
				name = "* %s" % (name)
			l.append( (name, key) )
		self["list"].setList(l)

	def ok(self):
		# Save the highlighted animation and apply it immediately.
		current = self["list"].getCurrent()
		if current:
			key = current[1]
			config.misc.window_animation_default.value = key
			config.misc.window_animation_default.save()
			setAnimation_current(key)
			setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
		self.close()

	def keyclose(self):
		# Cancel: re-apply the previously saved settings (undoes any preview).
		setAnimation_current(config.misc.window_animation_default.value)
		setAnimation_speed(int(config.misc.window_animation_speed.value))
		setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
		self.close()

	def config(self):
		self.session.open(AnimationSetupConfig)

	def preview(self):
		# Temporarily un-pause animations, switch to the highlighted one and
		# show a 3-second MessageBox so the user can see the effect.
		current = self["list"].getCurrent()
		if current:
			global g_animation_paused
			tmp = g_animation_paused
			g_animation_paused = False
			setAnimation_current(current[1])
			self.session.open(MessageBox, current[0], MessageBox.TYPE_INFO, timeout=3)
			g_animation_paused = tmp
def checkAttrib(self, paused):
	"""Return True if this screen's skin sets animationPaused="1"/"on"
	and the global pause flag currently equals `paused`.

	Called with a Screen instance as `self` by the monkeypatched
	Screen.show/doClose hooks below.
	"""
	if g_animation_paused is paused:
		# Fixed: narrowed the bare `except:` (which also swallowed
		# KeyboardInterrupt/SystemExit) to the two errors that can actually
		# occur when skinAttributes is missing or not iterable.
		try:
			for (attr, value) in self.skinAttributes:
				if attr == "animationPaused" and value in ("1", "on"):
					return True
		except (AttributeError, TypeError):
			pass
	return False
def screen_show(self):
	# Monkeypatched replacement for Screen.show (installed by
	# sessionAnimationSetup). Shows instantly while animations are paused,
	# and pauses them when this screen's skin requests animationPaused.
	global g_animation_paused
	if g_animation_paused:
		setAnimation_current(0)  # 0 = animations disabled
	g_orig_show(self)
	if checkAttrib(self, False):
		g_animation_paused = True
def screen_doClose(self):
	# Monkeypatched replacement for Screen.doClose: when the screen that
	# paused animations closes, restore the user's configured animation.
	global g_animation_paused
	if checkAttrib(self, True):
		g_animation_paused = False
		setAnimation_current(config.misc.window_animation_default.value)
	g_orig_doClose(self)
def animationSetupMain(session, **kwargs):
	# Menu entry callback: opens the animation chooser screen.
	session.open(AnimationSetupScreen)
def startAnimationSetup(menuid):
	# WHERE_MENU hook: contribute an "Animations" entry to the system menu only.
	if menuid == "system":
		return [( _("Animations"), animationSetupMain, "animation_setup", None)]
	return []
def sessionAnimationSetup(session, reason, **kwargs):
	# WHERE_SESSIONSTART hook: apply the saved animation settings and install
	# the Screen.show/doClose monkeypatches exactly once (originals are kept
	# in g_orig_show/g_orig_doClose so the wrappers can chain to them).
	setAnimation_current(config.misc.window_animation_default.value)
	setAnimation_speed(int(config.misc.window_animation_speed.value))
	setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))

	global g_orig_show, g_orig_doClose
	if g_orig_show is None:
		g_orig_show = Screen.show
	if g_orig_doClose is None:
		g_orig_doClose = Screen.doClose
	Screen.show = screen_show
	Screen.doClose = screen_doClose
def Plugins(**kwargs):
return [
PluginDescriptor(
name = "Animations",
description = "Setup UI animations",
where = PluginDescriptor.WHERE_MENU,
needsRestart = False,
fnc = startAnimationSetup),
PluginDescriptor(
where = PluginDescriptor.WHERE_SESSIONSTART,
needsRestart = False,<|fim▁hole|> ]<|fim▁end|> | fnc = sessionAnimationSetup), |
<|file_name|>hamming_distance.py<|end_file_name|><|fim▁begin|>def hamming(s,t):
dist = 0
<|fim▁hole|> if s[x]!=t[x]:
dist+=1
return dist<|fim▁end|> | for x in range(len(s)):
|
<|file_name|>CpuStats.java<|end_file_name|><|fim▁begin|>/*-
* -\-\-
* docker-client
* --
* Copyright (C) 2016 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.docker.client.messages;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.ANY;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
 * CPU statistics as reported by the Docker stats API ("cpu_stats" section).
 * Immutable AutoValue type; instances are created by Jackson via the
 * {@code @JsonCreator} factory methods.
 */
@AutoValue
@JsonAutoDetect(fieldVisibility = ANY, getterVisibility = NONE, setterVisibility = NONE)
public abstract class CpuStats {

  /** Per-container CPU usage breakdown. */
  @JsonProperty("cpu_usage")
  public abstract CpuUsage cpuUsage();

  /** Host system CPU usage, in nanoseconds. */
  @JsonProperty("system_cpu_usage")
  public abstract Long systemCpuUsage();

  /** CFS throttling counters for the container. */
  @JsonProperty("throttling_data")
  public abstract ThrottlingData throttlingData();

  @JsonCreator
  static CpuStats create(
      @JsonProperty("cpu_usage") final CpuUsage cpuUsage,
      @JsonProperty("system_cpu_usage") final Long systemCpuUsage,
      @JsonProperty("throttling_data") final ThrottlingData throttlingData) {
    return new AutoValue_CpuStats(cpuUsage, systemCpuUsage, throttlingData);
  }

  /** Container CPU usage totals, all values in nanoseconds. */
  @AutoValue
  public abstract static class CpuUsage {

    /** Total CPU time consumed. */
    @JsonProperty("total_usage")
    public abstract Long totalUsage();

    /** CPU time consumed per logical CPU; the list is defensively copied. */
    @JsonProperty("percpu_usage")
    public abstract ImmutableList<Long> percpuUsage();

    /** CPU time spent in kernel mode. */
    @JsonProperty("usage_in_kernelmode")
    public abstract Long usageInKernelmode();

    /** CPU time spent in user mode. */
    @JsonProperty("usage_in_usermode")
    public abstract Long usageInUsermode();

    @JsonCreator
    static CpuUsage create(
        @JsonProperty("total_usage") final Long totalUsage,
        @JsonProperty("percpu_usage") final List<Long> perCpuUsage,
        @JsonProperty("usage_in_kernelmode") final Long usageInKernelmode,
        @JsonProperty("usage_in_usermode") final Long usageInUsermode) {
      return new AutoValue_CpuStats_CpuUsage(totalUsage, ImmutableList.copyOf(perCpuUsage),
          usageInKernelmode, usageInUsermode);
    }
  }

  /** CFS quota throttling statistics. */
  @AutoValue
  public abstract static class ThrottlingData {

    /** Number of enforcement periods that have elapsed. */
    @JsonProperty("periods")
    public abstract Long periods();

    /** Number of periods in which the container was throttled. */
    @JsonProperty("throttled_periods")
    public abstract Long throttledPeriods();

    /** Total time the container was throttled, in nanoseconds. */
    @JsonProperty("throttled_time")
    public abstract Long throttledTime();

    @JsonCreator
    static ThrottlingData create(
        @JsonProperty("periods") final Long periods,
        @JsonProperty("throttled_periods") final Long throttledPeriods,
        @JsonProperty("throttled_time") final Long throttledTime) {
      return new AutoValue_CpuStats_ThrottlingData(periods, throttledPeriods, throttledTime);
    }
  }
}
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>'''
from https://docs.djangoproject.com/en/1.7/topics/auth/customizing/#specifying-a-custom-user-model
'''
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import gettext_lazy as _
from custom_user.models import User
class UserCreationForm(forms.ModelForm):
    """
    A form for creating new users. Includes all the required
    fields, plus a repeated password.
    """
    password1 = forms.CharField(label=_('Password'), widget=forms.PasswordInput)
    password2 = forms.CharField(label=_('Password confirmation'), widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ('email',)

    def clean_password2(self):
        # Check that the two password entries match
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(_("Passwords don't match"))
        return password2

    def save(self, commit=True):
        # Save the provided password in hashed format.
        # commit=False returns the unsaved instance (Django ModelForm contract).
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    password = ReadOnlyPasswordHashField()

    class Meta:
        model = User
        fields = ('email', 'password', 'is_active', 'is_superuser')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]
class MyUserAdmin(UserAdmin):
    """Admin configuration for the custom email-based User model."""
    # The forms to add and change user instances
    add_form = UserCreationForm

    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = ('email', 'is_superuser')
    list_filter = ('is_superuser',)
    fieldsets = (
        (None, {'fields': ('email','name', 'password', 'family')}),
        ('Permissions', {'fields': ('is_superuser','is_active',)}),
        ('Settings', {'fields': ('language','receive_update_emails','receive_photo_update_emails')}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2')}
        ),
    )
    search_fields = ('email',)
    ordering = ('email',)
    filter_horizontal = ()
    # Use a raw-id lookup widget for the family FK to avoid a huge dropdown.
    raw_id_fields = ('family',)
# Now register the new UserAdmin...
admin.site.register(User, MyUserAdmin)<|fim▁end|> | form = UserChangeForm |
<|file_name|>PressureSensor.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <fcntl.h>
#include <errno.h>
#include <math.h>
#include <poll.h>
#include <unistd.h>
#include <dirent.h>
#include <sys/select.h>
#include <cutils/log.h>
#include "PressureSensor.h"
#define LOGTAG "PressureSensor"
/*
* The BMP driver gives pascal values.
* It needs to be changed into hectoPascal
*/
#define PRESSURE_HECTO (1.0f/100.0f)
/*****************************************************************************/
// Binds to the "pressure_sensor" input device, prepares the pending event
// template, builds the sysfs control path and enables the sensor once the
// input device was found.
PressureSensor::PressureSensor()
    : SensorBase(NULL, "pressure_sensor"),
      mEnabled(0),
      mInputReader(4),
      mHasPendingEvent(false)
{
    mPendingEvent.version = sizeof(sensors_event_t);
    mPendingEvent.sensor = ID_PR;
    mPendingEvent.type = SENSOR_TYPE_PRESSURE;
    memset(mPendingEvent.data, 0, sizeof(mPendingEvent.data));

    if (data_fd) {
        // input_sysfs_path_len marks where per-attribute names get appended
        // later (see setDelay).
        strcpy(input_sysfs_path, "/sys/class/input/");
        strcat(input_sysfs_path, input_name);
        strcat(input_sysfs_path, "/device/");
        input_sysfs_path_len = strlen(input_sysfs_path);
        enable(0, 1);
    }
}
// Disables the sensor if it is still enabled when the HAL object goes away.
PressureSensor::~PressureSensor() {
    if (mEnabled) {
        enable(0, 0);
    }
}
// Reads the current absolute pressure value from the input device so a
// fresh reading can be reported immediately after enabling. Always
// returns 0; a failed ioctl simply leaves no pending event.
int PressureSensor::setInitialState() {
    struct input_absinfo absinfo;

    if (!ioctl(data_fd, EVIOCGABS(EVENT_TYPE_PRESSURE), &absinfo)) {
        // make sure to report an event immediately
        mHasPendingEvent = true;
        mPendingEvent.pressure = absinfo.value * PRESSURE_HECTO;
    }
    return 0;
}
// Enables (en != 0) or disables the pressure sensor through the SSP hub.
// Returns 0 on success or if the state is already as requested, -1 when
// the SSP call fails. On a successful enable the current reading is
// queued via setInitialState().
int PressureSensor::enable(int32_t handle, int en) {
    int flags = en ? 1 : 0;
    int err;

    if (flags != mEnabled) {
        err = sspEnable(LOGTAG, SSP_PRESS, en);
        if(err >= 0){
            mEnabled = flags;
            setInitialState();

            return 0;
        }
        return -1;
    }
    return 0;
}
// True when an event (the initial reading) is queued for readEvents().
bool PressureSensor::hasPendingEvents() const {
    return mHasPendingEvent;
}
// Sets the sensor polling period by writing the delay (in nanoseconds)
// to the driver's "pressure_poll_delay" sysfs attribute.
// Returns 0 on success, -1 if the attribute could not be opened.
int PressureSensor::setDelay(int32_t handle, int64_t ns)
{
    int fd;

    strcpy(&input_sysfs_path[input_sysfs_path_len], "pressure_poll_delay");
    fd = open(input_sysfs_path, O_RDWR);
    if (fd >= 0) {
        char buf[80];
        // snprintf bounds the write; the (long long) cast makes the "%lld"
        // conversion portable for int64_t regardless of its underlying type.
        snprintf(buf, sizeof(buf), "%lld", (long long) ns);
        write(fd, buf, strlen(buf)+1);
        close(fd);
        return 0;
    }
    return -1;
}
// Drains input events into the caller's buffer (up to count entries) and
// returns the number of sensors_event_t written, or a negative errno from
// the input reader. A pending initial reading is delivered first.
// EV_REL events update the pressure value; EV_SYN timestamps and commits it.
int PressureSensor::readEvents(sensors_event_t* data, int count)
{
    if (count < 1)
        return -EINVAL;

    if (mHasPendingEvent) {
        mHasPendingEvent = false;
        mPendingEvent.timestamp = getTimestamp();
        *data = mPendingEvent;
        return mEnabled ? 1 : 0;
    }

    ssize_t n = mInputReader.fill(data_fd);
    if (n < 0)
        return n;

    int numEventReceived = 0;
    input_event const* event;

    while (count && mInputReader.readEvent(&event)) {
        int type = event->type;
        if (type == EV_REL) {
            if (event->code == EVENT_TYPE_PRESSURE) {
                // Driver reports pascal; convert to hectopascal.
                mPendingEvent.pressure = event->value * PRESSURE_HECTO;
            }
        } else if (type == EV_SYN) {
            // Sync marks a complete sample: stamp it and hand it out, but
            // only while the sensor is enabled.
            mPendingEvent.timestamp = timevalToNano(event->time);
            if (mEnabled) {
                *data++ = mPendingEvent;
                count--;
                numEventReceived++;
            }
        } else {
            ALOGE("%s: unknown event (type=%d, code=%d)", LOGTAG,
                    type, event->code);
        }
        mInputReader.next();
    }

    return numEventReceived;
}
* Copyright (C) 2008 The Android Open Source Project |
<|file_name|>commands.go<|end_file_name|><|fim▁begin|>package client
import (
"bufio"
"bytes"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
"os/exec"
"path"
"path/filepath"
"runtime"
"sort"
"strconv"
"strings"
"sync"
"text/tabwriter"
"text/template"
"time"
log "github.com/Sirupsen/logrus"
"github.com/docker/docker/api"
"github.com/docker/docker/api/types"
"github.com/docker/docker/autogen/dockerversion"
"github.com/docker/docker/engine"
"github.com/docker/docker/graph"
"github.com/docker/docker/nat"
"github.com/docker/docker/opts"
"github.com/docker/docker/pkg/archive"
"github.com/docker/docker/pkg/common"
"github.com/docker/docker/pkg/fileutils"
"github.com/docker/docker/pkg/homedir"
flag "github.com/docker/docker/pkg/mflag"
"github.com/docker/docker/pkg/parsers"
"github.com/docker/docker/pkg/parsers/filters"
"github.com/docker/docker/pkg/progressreader"
"github.com/docker/docker/pkg/promise"
"github.com/docker/docker/pkg/resolvconf"
"github.com/docker/docker/pkg/signal"
"github.com/docker/docker/pkg/symlink"
"github.com/docker/docker/pkg/term"
"github.com/docker/docker/pkg/timeutils"
"github.com/docker/docker/pkg/units"
"github.com/docker/docker/pkg/urlutil"
"github.com/docker/docker/registry"
"github.com/docker/docker/runconfig"
"github.com/docker/docker/utils"
)
const (
	// tarHeaderSize is the size of one tar header block; CmdBuild peeks
	// this many bytes from stdin to decide whether the input is a tar
	// archive or a plain Dockerfile.
	tarHeaderSize = 512
)
// CmdHelp shows usage for a subcommand (two-word subcommands are tried
// first) or, with no arguments, the global docker usage. An unknown
// command name is reported on stderr and terminates the process.
func (cli *DockerCli) CmdHelp(args ...string) error {
	if len(args) > 1 {
		if method, exists := cli.getMethod(args[:2]...); exists {
			method("--help")
			return nil
		}
	}
	if len(args) > 0 {
		method, exists := cli.getMethod(args[0])
		if !exists {
			fmt.Fprintf(cli.err, "docker: '%s' is not a docker command. See 'docker --help'.\n", args[0])
			os.Exit(1)
		}
		method("--help")
		return nil
	}
	flag.Usage()
	return nil
}
// CmdBuild builds an image from a Dockerfile and a build context.
// The context may come from stdin ("-", either a raw Dockerfile or a
// tar archive), a remote URL (git URLs are cloned locally when a git
// binary is available), or a local directory. The context is tarred
// and streamed to the daemon; build options travel as query params.
func (cli *DockerCli) CmdBuild(args ...string) error {
	cmd := cli.Subcmd("build", "PATH | URL | -", "Build a new image from the source code at PATH", true)
	tag := cmd.String([]string{"t", "-tag"}, "", "Repository name (and optionally a tag) for the image")
	suppressOutput := cmd.Bool([]string{"q", "-quiet"}, false, "Suppress the verbose output generated by the containers")
	noCache := cmd.Bool([]string{"#no-cache", "-no-cache"}, false, "Do not use cache when building the image")
	rm := cmd.Bool([]string{"#rm", "-rm"}, true, "Remove intermediate containers after a successful build")
	forceRm := cmd.Bool([]string{"-force-rm"}, false, "Always remove intermediate containers")
	pull := cmd.Bool([]string{"-pull"}, false, "Always attempt to pull a newer version of the image")
	dockerfileName := cmd.String([]string{"f", "-file"}, "", "Name of the Dockerfile (Default is 'PATH/Dockerfile')")
	flMemoryString := cmd.String([]string{"m", "-memory"}, "", "Memory limit")
	flMemorySwap := cmd.String([]string{"-memory-swap"}, "", "Total memory (memory + swap), '-1' to disable swap")
	flCpuShares := cmd.Int64([]string{"c", "-cpu-shares"}, 0, "CPU shares (relative weight)")
	flCpuSetCpus := cmd.String([]string{"-cpuset-cpus"}, "", "CPUs in which to allow execution (0-3, 0,1)")
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	var (
		context  archive.Archive
		isRemote bool
		err      error
	)
	_, err = exec.LookPath("git")
	hasGit := err == nil
	if cmd.Arg(0) == "-" {
		// As a special case, 'docker build -' will build from either an empty context with the
		// contents of stdin as a Dockerfile, or a tar-ed context from stdin.
		buf := bufio.NewReader(cli.in)
		magic, err := buf.Peek(tarHeaderSize)
		if err != nil && err != io.EOF {
			return fmt.Errorf("failed to peek context header from STDIN: %v", err)
		}
		if !archive.IsArchive(magic) {
			dockerfile, err := ioutil.ReadAll(buf)
			if err != nil {
				return fmt.Errorf("failed to read Dockerfile from STDIN: %v", err)
			}
			// -f option has no meaning when we're reading it from stdin,
			// so just use our default Dockerfile name
			*dockerfileName = api.DefaultDockerfileName
			context, err = archive.Generate(*dockerfileName, string(dockerfile))
		} else {
			context = ioutil.NopCloser(buf)
		}
	} else if urlutil.IsURL(cmd.Arg(0)) && (!urlutil.IsGitURL(cmd.Arg(0)) || !hasGit) {
		// Plain URLs (or git URLs when no git binary is installed) are
		// fetched by the daemon itself; no local context is sent.
		isRemote = true
	} else {
		root := cmd.Arg(0)
		if urlutil.IsGitURL(root) {
			// Clone the repository into a temp dir and use the checkout
			// as the build context.
			remoteURL := cmd.Arg(0)
			if !urlutil.IsGitTransport(remoteURL) {
				remoteURL = "https://" + remoteURL
			}
			root, err = ioutil.TempDir("", "docker-build-git")
			if err != nil {
				return err
			}
			defer os.RemoveAll(root)
			if output, err := exec.Command("git", "clone", "--recursive", remoteURL, root).CombinedOutput(); err != nil {
				return fmt.Errorf("Error trying to use git: %s (%s)", err, output)
			}
		}
		if _, err := os.Stat(root); err != nil {
			return err
		}
		absRoot, err := filepath.Abs(root)
		if err != nil {
			return err
		}
		filename := *dockerfileName // path to Dockerfile
		if *dockerfileName == "" {
			// No -f/--file was specified so use the default
			*dockerfileName = api.DefaultDockerfileName
			filename = filepath.Join(absRoot, *dockerfileName)
			// Just to be nice ;-) look for 'dockerfile' too but only
			// use it if we found it, otherwise ignore this check
			if _, err = os.Lstat(filename); os.IsNotExist(err) {
				tmpFN := path.Join(absRoot, strings.ToLower(*dockerfileName))
				if _, err = os.Lstat(tmpFN); err == nil {
					*dockerfileName = strings.ToLower(*dockerfileName)
					filename = tmpFN
				}
			}
		}
		origDockerfile := *dockerfileName // used for error msg
		if filename, err = filepath.Abs(filename); err != nil {
			return err
		}
		// Verify that 'filename' is within the build context
		filename, err = symlink.FollowSymlinkInScope(filename, absRoot)
		if err != nil {
			return fmt.Errorf("The Dockerfile (%s) must be within the build context (%s)", origDockerfile, root)
		}
		// Now reset the dockerfileName to be relative to the build context
		*dockerfileName, err = filepath.Rel(absRoot, filename)
		if err != nil {
			return err
		}
		// And canonicalize dockerfile name to a platform-independent one
		*dockerfileName, err = archive.CanonicalTarNameForPath(*dockerfileName)
		if err != nil {
			return fmt.Errorf("Cannot canonicalize dockerfile path %s: %v", dockerfileName, err)
		}
		if _, err = os.Lstat(filename); os.IsNotExist(err) {
			return fmt.Errorf("Cannot locate Dockerfile: %s", origDockerfile)
		}
		var includes = []string{"."}
		excludes, err := utils.ReadDockerIgnore(path.Join(root, ".dockerignore"))
		if err != nil {
			return err
		}
		// If .dockerignore mentions .dockerignore or the Dockerfile
		// then make sure we send both files over to the daemon
		// because Dockerfile is, obviously, needed no matter what, and
		// .dockerignore is needed to know if either one needs to be
		// removed. The deamon will remove them for us, if needed, after it
		// parses the Dockerfile.
		keepThem1, _ := fileutils.Matches(".dockerignore", excludes)
		keepThem2, _ := fileutils.Matches(*dockerfileName, excludes)
		if keepThem1 || keepThem2 {
			includes = append(includes, ".dockerignore", *dockerfileName)
		}
		if err = utils.ValidateContextDirectory(root, excludes); err != nil {
			return fmt.Errorf("Error checking context is accessible: '%s'. Please check permissions and try again.", err)
		}
		options := &archive.TarOptions{
			Compression:     archive.Uncompressed,
			ExcludePatterns: excludes,
			IncludeFiles:    includes,
		}
		context, err = archive.TarWithOptions(root, options)
		if err != nil {
			return err
		}
	}
	// windows: show error message about modified file permissions
	// FIXME: this is not a valid warning when the daemon is running windows. should be removed once docker engine for windows can build.
	if runtime.GOOS == "windows" {
		log.Warn(`SECURITY WARNING: You are building a Docker image from Windows against a Linux Docker host. All files and directories added to build context will have '-rwxr-xr-x' permissions. It is recommended to double check and reset permissions for sensitive files and directories.`)
	}
	var body io.Reader
	// Setup an upload progress bar
	// FIXME: ProgressReader shouldn't be this annoying to use
	if context != nil {
		sf := utils.NewStreamFormatter(false)
		body = progressreader.New(progressreader.Config{
			In:        context,
			Out:       cli.out,
			Formatter: sf,
			NewLines:  true,
			ID:        "",
			Action:    "Sending build context to Docker daemon",
		})
	}
	// Parse human-readable memory limits into byte counts.
	var memory int64
	if *flMemoryString != "" {
		parsedMemory, err := units.RAMInBytes(*flMemoryString)
		if err != nil {
			return err
		}
		memory = parsedMemory
	}
	var memorySwap int64
	if *flMemorySwap != "" {
		if *flMemorySwap == "-1" {
			memorySwap = -1
		} else {
			parsedMemorySwap, err := units.RAMInBytes(*flMemorySwap)
			if err != nil {
				return err
			}
			memorySwap = parsedMemorySwap
		}
	}
	// Send the build context
	v := &url.Values{}
	//Check if the given image name can be resolved
	if *tag != "" {
		repository, tag := parsers.ParseRepositoryTag(*tag)
		if err := registry.ValidateRepositoryName(repository); err != nil {
			return err
		}
		if len(tag) > 0 {
			if err := graph.ValidateTagName(tag); err != nil {
				return err
			}
		}
	}
	v.Set("t", *tag)
	if *suppressOutput {
		v.Set("q", "1")
	}
	if isRemote {
		v.Set("remote", cmd.Arg(0))
	}
	if *noCache {
		v.Set("nocache", "1")
	}
	if *rm {
		v.Set("rm", "1")
	} else {
		v.Set("rm", "0")
	}
	if *forceRm {
		v.Set("forcerm", "1")
	}
	if *pull {
		v.Set("pull", "1")
	}
	v.Set("cpusetcpus", *flCpuSetCpus)
	v.Set("cpushares", strconv.FormatInt(*flCpuShares, 10))
	v.Set("memory", strconv.FormatInt(memory, 10))
	v.Set("memswap", strconv.FormatInt(memorySwap, 10))
	v.Set("dockerfile", *dockerfileName)
	cli.LoadConfigFile()
	// Registry credentials ride along in a header so the daemon can
	// pull base images on our behalf.
	headers := http.Header(make(map[string][]string))
	buf, err := json.Marshal(cli.configFile)
	if err != nil {
		return err
	}
	headers.Add("X-Registry-Config", base64.URLEncoding.EncodeToString(buf))
	if context != nil {
		headers.Set("Content-Type", "application/tar")
	}
	err = cli.stream("POST", fmt.Sprintf("/build?%s", v.Encode()), body, cli.out, headers)
	if jerr, ok := err.(*utils.JSONError); ok {
		// If no error code is set, default to 1
		if jerr.Code == 0 {
			jerr.Code = 1
		}
		return &utils.StatusError{Status: jerr.Message, StatusCode: jerr.Code}
	}
	return err
}
// 'docker login': login / register a user to registry service.
// CmdLogin registers or logs a user in to a registry server (the public
// index by default). Missing credentials are prompted for interactively
// (password echo disabled), the daemon's /auth endpoint validates them,
// and on success they are persisted to the local config file.
func (cli *DockerCli) CmdLogin(args ...string) error {
	cmd := cli.Subcmd("login", "[SERVER]", "Register or log in to a Docker registry server, if no server is\nspecified \""+registry.IndexServerAddress()+"\" is the default.", true)
	cmd.Require(flag.Max, 1)
	var username, password, email string
	cmd.StringVar(&username, []string{"u", "-username"}, "", "Username")
	cmd.StringVar(&password, []string{"p", "-password"}, "", "Password")
	cmd.StringVar(&email, []string{"e", "-email"}, "", "Email")
	utils.ParseFlags(cmd, args, true)
	serverAddress := registry.IndexServerAddress()
	if len(cmd.Args()) > 0 {
		serverAddress = cmd.Arg(0)
	}
	// Print a prompt, showing the config-file value as the default.
	promptDefault := func(prompt string, configDefault string) {
		if configDefault == "" {
			fmt.Fprintf(cli.out, "%s: ", prompt)
		} else {
			fmt.Fprintf(cli.out, "%s (%s): ", prompt, configDefault)
		}
	}
	// Read one line from the user; any read error terminates the process.
	readInput := func(in io.Reader, out io.Writer) string {
		reader := bufio.NewReader(in)
		line, _, err := reader.ReadLine()
		if err != nil {
			fmt.Fprintln(out, err.Error())
			os.Exit(1)
		}
		return string(line)
	}
	cli.LoadConfigFile()
	authconfig, ok := cli.configFile.Configs[serverAddress]
	if !ok {
		authconfig = registry.AuthConfig{}
	}
	if username == "" {
		promptDefault("Username", authconfig.Username)
		username = readInput(cli.in, cli.out)
		username = strings.Trim(username, " ")
		if username == "" {
			username = authconfig.Username
		}
	}
	// Assume that a different username means they may not want to use
	// the password or email from the config file, so prompt them
	if username != authconfig.Username {
		if password == "" {
			oldState, err := term.SaveState(cli.inFd)
			if err != nil {
				return err
			}
			fmt.Fprintf(cli.out, "Password: ")
			term.DisableEcho(cli.inFd, oldState)
			password = readInput(cli.in, cli.out)
			fmt.Fprint(cli.out, "\n")
			term.RestoreTerminal(cli.inFd, oldState)
			if password == "" {
				return fmt.Errorf("Error : Password Required")
			}
		}
		if email == "" {
			promptDefault("Email", authconfig.Email)
			email = readInput(cli.in, cli.out)
			if email == "" {
				email = authconfig.Email
			}
		}
	} else {
		// However, if they don't override the username use the
		// password or email from the cmd line if specified. IOW, allow
		// then to change/override them. And if not specified, just
		// use what's in the config file
		if password == "" {
			password = authconfig.Password
		}
		if email == "" {
			email = authconfig.Email
		}
	}
	authconfig.Username = username
	authconfig.Password = password
	authconfig.Email = email
	authconfig.ServerAddress = serverAddress
	cli.configFile.Configs[serverAddress] = authconfig
	stream, statusCode, err := cli.call("POST", "/auth", cli.configFile.Configs[serverAddress], false)
	// 401 means the credentials were rejected: drop them from the
	// config before reporting the error.
	if statusCode == 401 {
		delete(cli.configFile.Configs, serverAddress)
		registry.SaveConfig(cli.configFile)
		return err
	}
	if err != nil {
		return err
	}
	var out2 engine.Env
	err = out2.Decode(stream)
	if err != nil {
		// Decoding failed: reload the on-disk config to discard the
		// unverified in-memory credentials.
		cli.configFile, _ = registry.LoadConfig(homedir.Get())
		return err
	}
	registry.SaveConfig(cli.configFile)
	fmt.Fprintf(cli.out, "WARNING: login credentials saved in %s.\n", path.Join(homedir.Get(), registry.CONFIGFILE))
	if out2.Get("Status") != "" {
		fmt.Fprintf(cli.out, "%s\n", out2.Get("Status"))
	}
	return nil
}
// log out from a Docker registry
func (cli *DockerCli) CmdLogout(args ...string) error {
cmd := cli.Subcmd("logout", "[SERVER]", "Log out from a Docker registry, if no server is\nspecified \""+registry.IndexServerAddress()+"\" is the default.", true)
cmd.Require(flag.Max, 1)
utils.ParseFlags(cmd, args, false)
serverAddress := registry.IndexServerAddress()
if len(cmd.Args()) > 0 {
serverAddress = cmd.Arg(0)
}
cli.LoadConfigFile()
if _, ok := cli.configFile.Configs[serverAddress]; !ok {
fmt.Fprintf(cli.out, "Not logged in to %s\n", serverAddress)
} else {
fmt.Fprintf(cli.out, "Remove login credentials for %s\n", serverAddress)
delete(cli.configFile.Configs, serverAddress)
if err := registry.SaveConfig(cli.configFile); err != nil {
return fmt.Errorf("Failed to save docker config: %v", err)
}
}
return nil
}
// 'docker wait': block until a container stops
func (cli *DockerCli) CmdWait(args ...string) error {
cmd := cli.Subcmd("wait", "CONTAINER [CONTAINER...]", "Block until a container stops, then print its exit code.", true)
cmd.Require(flag.Min, 1)
utils.ParseFlags(cmd, args, true)
var encounteredError error
for _, name := range cmd.Args() {
status, err := waitForExit(cli, name)
if err != nil {
fmt.Fprintf(cli.err, "%s\n", err)
encounteredError = fmt.Errorf("Error: failed to wait one or more containers")
} else {
fmt.Fprintf(cli.out, "%d\n", status)
}
}
return encounteredError
}
// 'docker version': show version information
func (cli *DockerCli) CmdVersion(args ...string) error {
cmd := cli.Subcmd("version", "", "Show the Docker version information.", true)
cmd.Require(flag.Exact, 0)
utils.ParseFlags(cmd, args, false)
if dockerversion.VERSION != "" {
fmt.Fprintf(cli.out, "Client version: %s\n", dockerversion.VERSION)
}
fmt.Fprintf(cli.out, "Client API version: %s\n", api.APIVERSION)
fmt.Fprintf(cli.out, "Go version (client): %s\n", runtime.Version())
if dockerversion.GITCOMMIT != "" {
fmt.Fprintf(cli.out, "Git commit (client): %s\n", dockerversion.GITCOMMIT)
}
fmt.Fprintf(cli.out, "OS/Arch (client): %s/%s\n", runtime.GOOS, runtime.GOARCH)
body, _, err := readBody(cli.call("GET", "/version", nil, false))
if err != nil {
return err
}
out := engine.NewOutput()
remoteVersion, err := out.AddEnv()
if err != nil {
log.Errorf("Error reading remote version: %s", err)
return err
}
if _, err := out.Write(body); err != nil {
log.Errorf("Error reading remote version: %s", err)
return err
}
out.Close()
fmt.Fprintf(cli.out, "Server version: %s\n", remoteVersion.Get("Version"))
if apiVersion := remoteVersion.Get("ApiVersion"); apiVersion != "" {
fmt.Fprintf(cli.out, "Server API version: %s\n", apiVersion)
}
fmt.Fprintf(cli.out, "Go version (server): %s\n", remoteVersion.Get("GoVersion"))
fmt.Fprintf(cli.out, "Git commit (server): %s\n", remoteVersion.Get("GitCommit"))
fmt.Fprintf(cli.out, "OS/Arch (server): %s/%s\n", remoteVersion.Get("Os"), remoteVersion.Get("Arch"))
return nil
}
// 'docker info': display system-wide information.
func (cli *DockerCli) CmdInfo(args ...string) error {
cmd := cli.Subcmd("info", "", "Display system-wide information", true)
cmd.Require(flag.Exact, 0)
utils.ParseFlags(cmd, args, false)
body, _, err := readBody(cli.call("GET", "/info", nil, false))
if err != nil {
return err
}
out := engine.NewOutput()
remoteInfo, err := out.AddEnv()
if err != nil {
return err
}
if _, err := out.Write(body); err != nil {
log.Errorf("Error reading remote info: %s", err)
return err
}
out.Close()
if remoteInfo.Exists("Containers") {
fmt.Fprintf(cli.out, "Containers: %d\n", remoteInfo.GetInt("Containers"))
}
if remoteInfo.Exists("Images") {
fmt.Fprintf(cli.out, "Images: %d\n", remoteInfo.GetInt("Images"))
}
if remoteInfo.Exists("Driver") {
fmt.Fprintf(cli.out, "Storage Driver: %s\n", remoteInfo.Get("Driver"))
}
if remoteInfo.Exists("DriverStatus") {
var driverStatus [][2]string
if err := remoteInfo.GetJson("DriverStatus", &driverStatus); err != nil {
return err
}
for _, pair := range driverStatus {
fmt.Fprintf(cli.out, " %s: %s\n", pair[0], pair[1])
}
}
if remoteInfo.Exists("ExecutionDriver") {
fmt.Fprintf(cli.out, "Execution Driver: %s\n", remoteInfo.Get("ExecutionDriver"))
}
if remoteInfo.Exists("KernelVersion") {
fmt.Fprintf(cli.out, "Kernel Version: %s\n", remoteInfo.Get("KernelVersion"))
}
if remoteInfo.Exists("OperatingSystem") {
fmt.Fprintf(cli.out, "Operating System: %s\n", remoteInfo.Get("OperatingSystem"))
}
if remoteInfo.Exists("NCPU") {
fmt.Fprintf(cli.out, "CPUs: %d\n", remoteInfo.GetInt("NCPU"))
}
if remoteInfo.Exists("MemTotal") {
fmt.Fprintf(cli.out, "Total Memory: %s\n", units.BytesSize(float64(remoteInfo.GetInt64("MemTotal"))))
}
if remoteInfo.Exists("Name") {
fmt.Fprintf(cli.out, "Name: %s\n", remoteInfo.Get("Name"))
}
if remoteInfo.Exists("ID") {
fmt.Fprintf(cli.out, "ID: %s\n", remoteInfo.Get("ID"))
}
if remoteInfo.GetBool("Debug") || os.Getenv("DEBUG") != "" {
if remoteInfo.Exists("Debug") {
fmt.Fprintf(cli.out, "Debug mode (server): %v\n", remoteInfo.GetBool("Debug"))
}
fmt.Fprintf(cli.out, "Debug mode (client): %v\n", os.Getenv("DEBUG") != "")
if remoteInfo.Exists("NFd") {
fmt.Fprintf(cli.out, "Fds: %d\n", remoteInfo.GetInt("NFd"))
}
if remoteInfo.Exists("NGoroutines") {
fmt.Fprintf(cli.out, "Goroutines: %d\n", remoteInfo.GetInt("NGoroutines"))
}
if remoteInfo.Exists("SystemTime") {
t, err := remoteInfo.GetTime("SystemTime")
if err != nil {
log.Errorf("Error reading system time: %v", err)
} else {
fmt.Fprintf(cli.out, "System Time: %s\n", t.Format(time.UnixDate))
}
}
if remoteInfo.Exists("NEventsListener") {
fmt.Fprintf(cli.out, "EventsListeners: %d\n", remoteInfo.GetInt("NEventsListener"))
}
if initSha1 := remoteInfo.Get("InitSha1"); initSha1 != "" {
fmt.Fprintf(cli.out, "Init SHA1: %s\n", initSha1)
}
if initPath := remoteInfo.Get("InitPath"); initPath != "" {
fmt.Fprintf(cli.out, "Init Path: %s\n", initPath)
}
if root := remoteInfo.Get("DockerRootDir"); root != "" {
fmt.Fprintf(cli.out, "Docker Root Dir: %s\n", root)
}
}
if remoteInfo.Exists("HttpProxy") {
fmt.Fprintf(cli.out, "Http Proxy: %s\n", remoteInfo.Get("HttpProxy"))
}
if remoteInfo.Exists("HttpsProxy") {
fmt.Fprintf(cli.out, "Https Proxy: %s\n", remoteInfo.Get("HttpsProxy"))
}
if remoteInfo.Exists("NoProxy") {
fmt.Fprintf(cli.out, "No Proxy: %s\n", remoteInfo.Get("NoProxy"))
}
if len(remoteInfo.GetList("IndexServerAddress")) != 0 {
cli.LoadConfigFile()
u := cli.configFile.Configs[remoteInfo.Get("IndexServerAddress")].Username
if len(u) > 0 {
fmt.Fprintf(cli.out, "Username: %v\n", u)
fmt.Fprintf(cli.out, "Registry: %v\n", remoteInfo.GetList("IndexServerAddress"))
}
}
if remoteInfo.Exists("MemoryLimit") && !remoteInfo.GetBool("MemoryLimit") {
fmt.Fprintf(cli.err, "WARNING: No memory limit support\n")
}
if remoteInfo.Exists("SwapLimit") && !remoteInfo.GetBool("SwapLimit") {
fmt.Fprintf(cli.err, "WARNING: No swap limit support\n")
}
if remoteInfo.Exists("IPv4Forwarding") && !remoteInfo.GetBool("IPv4Forwarding") {
fmt.Fprintf(cli.err, "WARNING: IPv4 forwarding is disabled.\n")
}
if remoteInfo.Exists("Labels") {
fmt.Fprintln(cli.out, "Labels:")
for _, attribute := range remoteInfo.GetList("Labels") {
fmt.Fprintf(cli.out, " %s\n", attribute)
}
}
return nil
}
// CmdStop asks the daemon to stop each named container (SIGTERM, then
// SIGKILL after the -t grace period) and echoes names that succeeded.
func (cli *DockerCli) CmdStop(args ...string) error {
	cmd := cli.Subcmd("stop", "CONTAINER [CONTAINER...]", "Stop a running container by sending SIGTERM and then SIGKILL after a\ngrace period", true)
	nSeconds := cmd.Int([]string{"t", "-time"}, 10, "Seconds to wait for stop before killing it")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	v := url.Values{}
	v.Set("t", strconv.Itoa(*nSeconds))

	var encounteredError error
	for _, name := range cmd.Args() {
		if _, _, err := readBody(cli.call("POST", "/containers/"+name+"/stop?"+v.Encode(), nil, false)); err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to stop one or more containers")
		} else {
			fmt.Fprintf(cli.out, "%s\n", name)
		}
	}
	return encounteredError
}
// CmdRestart restarts each named container through the remote API,
// passing the -t stop timeout along, and echoes names that succeeded.
func (cli *DockerCli) CmdRestart(args ...string) error {
	cmd := cli.Subcmd("restart", "CONTAINER [CONTAINER...]", "Restart a running container", true)
	nSeconds := cmd.Int([]string{"t", "-time"}, 10, "Seconds to wait for stop before killing the container")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	v := url.Values{}
	v.Set("t", strconv.Itoa(*nSeconds))

	var encounteredError error
	for _, name := range cmd.Args() {
		if _, _, err := readBody(cli.call("POST", "/containers/"+name+"/restart?"+v.Encode(), nil, false)); err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to restart one or more containers")
		} else {
			fmt.Fprintf(cli.out, "%s\n", name)
		}
	}
	return encounteredError
}
// forwardAllSignals relays signals caught by the CLI process to the
// container cid via the remote kill API. SIGCHLD is ignored; signals
// with no name in signal.SignalMap are logged and dropped instead of
// being forwarded. The returned channel should be released with
// signal.StopCatch when no longer needed.
func (cli *DockerCli) forwardAllSignals(cid string) chan os.Signal {
	sigc := make(chan os.Signal, 128)
	signal.CatchAll(sigc)
	go func() {
		for s := range sigc {
			if s == signal.SIGCHLD {
				continue
			}
			// Map the os.Signal back to its textual name for the API.
			var sig string
			for sigStr, sigN := range signal.SignalMap {
				if sigN == s {
					sig = sigStr
					break
				}
			}
			if sig == "" {
				log.Errorf("Unsupported signal: %v. Discarding.", s)
				// Fix: previously fell through and POSTed an empty
				// "?signal=", which the daemon rejects; skip instead.
				continue
			}
			if _, _, err := readBody(cli.call("POST", fmt.Sprintf("/containers/%s/kill?signal=%s", cid, sig), nil, false)); err != nil {
				log.Debugf("Error sending signal: %s", err)
			}
		}
	}()
	return sigc
}
// CmdStart starts one or more stopped containers. With -a/-i it
// attaches to a single container's streams (hijacking the HTTP
// connection before issuing the start, forwarding signals when there
// is no TTY) and propagates the container's exit status; otherwise it
// simply starts each container and prints its name.
func (cli *DockerCli) CmdStart(args ...string) error {
	var (
		cErr chan error
		tty  bool
		cmd       = cli.Subcmd("start", "CONTAINER [CONTAINER...]", "Start one or more stopped containers", true)
		attach    = cmd.Bool([]string{"a", "-attach"}, false, "Attach STDOUT/STDERR and forward signals")
		openStdin = cmd.Bool([]string{"i", "-interactive"}, false, "Attach container's STDIN")
	)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)
	if *attach || *openStdin {
		if cmd.NArg() > 1 {
			return fmt.Errorf("You cannot start and attach multiple containers at once.")
		}
		// Inspect the container to learn its TTY/stdin configuration
		// before attaching.
		stream, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/json", nil, false)
		if err != nil {
			return err
		}
		env := engine.Env{}
		if err := env.Decode(stream); err != nil {
			return err
		}
		config := env.GetSubEnv("Config")
		tty = config.GetBool("Tty")
		if !tty {
			// Without a TTY, forward our signals to the container.
			sigc := cli.forwardAllSignals(cmd.Arg(0))
			defer signal.StopCatch(sigc)
		}
		var in io.ReadCloser
		v := url.Values{}
		v.Set("stream", "1")
		if *openStdin && config.GetBool("OpenStdin") {
			v.Set("stdin", "1")
			in = cli.in
		}
		v.Set("stdout", "1")
		v.Set("stderr", "1")
		hijacked := make(chan io.Closer)
		// Block the return until the chan gets closed
		defer func() {
			log.Debugf("CmdStart() returned, defer waiting for hijack to finish.")
			if _, ok := <-hijacked; ok {
				log.Errorf("Hijack did not finish (chan still open)")
			}
			cli.in.Close()
		}()
		cErr = promise.Go(func() error {
			return cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), tty, in, cli.out, cli.err, hijacked, nil)
		})
		// Acknowledge the hijack before starting
		select {
		case closer := <-hijacked:
			// Make sure that the hijack gets closed when returning (results
			// in closing the hijack chan and freeing server's goroutines)
			if closer != nil {
				defer closer.Close()
			}
		case err := <-cErr:
			if err != nil {
				return err
			}
		}
	}
	var encounteredError error
	for _, name := range cmd.Args() {
		_, _, err := readBody(cli.call("POST", "/containers/"+name+"/start", nil, false))
		if err != nil {
			if !*attach && !*openStdin {
				// attach and openStdin is false means it could be starting multiple containers
				// when a container start failed, show the error message and start next
				fmt.Fprintf(cli.err, "%s\n", err)
				encounteredError = fmt.Errorf("Error: failed to start one or more containers")
			} else {
				encounteredError = err
			}
		} else {
			if !*attach && !*openStdin {
				fmt.Fprintf(cli.out, "%s\n", name)
			}
		}
	}
	if encounteredError != nil {
		return encounteredError
	}
	if *openStdin || *attach {
		// Attached mode: track terminal resizes, wait for the attach
		// goroutine, then mirror the container's exit code.
		if tty && cli.isTerminalOut {
			if err := cli.monitorTtySize(cmd.Arg(0), false); err != nil {
				log.Errorf("Error monitoring TTY size: %s", err)
			}
		}
		if attchErr := <-cErr; attchErr != nil {
			return attchErr
		}
		_, status, err := getExitCode(cli, cmd.Arg(0))
		if err != nil {
			return err
		}
		if status != 0 {
			return &utils.StatusError{StatusCode: status}
		}
	}
	return nil
}
// CmdUnpause resumes all processes within each named container and
// echoes the names that were successfully unpaused.
func (cli *DockerCli) CmdUnpause(args ...string) error {
	cmd := cli.Subcmd("unpause", "CONTAINER [CONTAINER...]", "Unpause all processes within a container", true)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, false)

	var encounteredError error
	for _, name := range cmd.Args() {
		_, _, err := readBody(cli.call("POST", fmt.Sprintf("/containers/%s/unpause", name), nil, false))
		if err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to unpause container named %s", name)
			continue
		}
		fmt.Fprintf(cli.out, "%s\n", name)
	}
	return encounteredError
}
// CmdPause suspends all processes within each named container and
// echoes the names that were successfully paused.
func (cli *DockerCli) CmdPause(args ...string) error {
	cmd := cli.Subcmd("pause", "CONTAINER [CONTAINER...]", "Pause all processes within a container", true)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, false)

	var encounteredError error
	for _, name := range cmd.Args() {
		_, _, err := readBody(cli.call("POST", fmt.Sprintf("/containers/%s/pause", name), nil, false))
		if err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to pause container named %s", name)
			continue
		}
		fmt.Fprintf(cli.out, "%s\n", name)
	}
	return encounteredError
}
// CmdRename renames an existing container from OLD_NAME to NEW_NAME.
func (cli *DockerCli) CmdRename(args ...string) error {
	cmd := cli.Subcmd("rename", "OLD_NAME NEW_NAME", "Rename a container", true)
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 2 {
		cmd.Usage()
		return nil
	}
	oldName := cmd.Arg(0)
	newName := cmd.Arg(1)

	// Fix: encode the new name as a proper query parameter instead of
	// splicing it raw into the URL, so names containing characters that
	// are special in query strings survive the round trip.
	v := url.Values{}
	v.Set("name", newName)

	if _, _, err := readBody(cli.call("POST", fmt.Sprintf("/containers/%s/rename?%s", oldName, v.Encode()), nil, false)); err != nil {
		fmt.Fprintf(cli.err, "%s\n", err)
		return fmt.Errorf("Error: failed to rename container named %s", oldName)
	}
	return nil
}
// CmdInspect prints low-level JSON details for containers or images.
// Each name is tried first as a container, then as an image. Without
// -f the raw JSON objects are collected into an indented array; with
// -f each object is rendered through the given Go template. Lookup or
// parse failures set a non-zero exit status but do not stop the loop.
func (cli *DockerCli) CmdInspect(args ...string) error {
	cmd := cli.Subcmd("inspect", "CONTAINER|IMAGE [CONTAINER|IMAGE...]", "Return low-level information on a container or image", true)
	tmplStr := cmd.String([]string{"f", "#format", "-format"}, "", "Format the output using the given go template")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)
	var tmpl *template.Template
	if *tmplStr != "" {
		var err error
		if tmpl, err = template.New("").Funcs(funcMap).Parse(*tmplStr); err != nil {
			fmt.Fprintf(cli.err, "Template parsing error: %v\n", err)
			return &utils.StatusError{StatusCode: 64,
				Status: "Template parsing error: " + err.Error()}
		}
	}
	indented := new(bytes.Buffer)
	indented.WriteByte('[')
	status := 0
	for _, name := range cmd.Args() {
		// Try the name as a container first, then fall back to an image.
		obj, _, err := readBody(cli.call("GET", "/containers/"+name+"/json", nil, false))
		if err != nil {
			if strings.Contains(err.Error(), "Too many") {
				fmt.Fprintf(cli.err, "Error: %v", err)
				status = 1
				continue
			}
			obj, _, err = readBody(cli.call("GET", "/images/"+name+"/json", nil, false))
			if err != nil {
				if strings.Contains(err.Error(), "No such") {
					fmt.Fprintf(cli.err, "Error: No such image or container: %s\n", name)
				} else {
					fmt.Fprintf(cli.err, "%s", err)
				}
				status = 1
				continue
			}
		}
		if tmpl == nil {
			if err = json.Indent(indented, obj, "", "  "); err != nil {
				fmt.Fprintf(cli.err, "%s\n", err)
				status = 1
				continue
			}
		} else {
			// Has template, will render
			var value interface{}
			if err := json.Unmarshal(obj, &value); err != nil {
				fmt.Fprintf(cli.err, "%s\n", err)
				status = 1
				continue
			}
			if err := tmpl.Execute(cli.out, value); err != nil {
				return err
			}
			cli.out.Write([]byte{'\n'})
		}
		indented.WriteString(",")
	}
	if indented.Len() > 1 {
		// Remove trailing ','
		indented.Truncate(indented.Len() - 1)
	}
	indented.WriteString("]\n")
	if tmpl == nil {
		if _, err := io.Copy(cli.out, indented); err != nil {
			return err
		}
	}
	if status != 0 {
		return &utils.StatusError{StatusCode: status}
	}
	return nil
}
// CmdTop lists the processes running inside a container, forwarding
// any extra arguments to ps on the daemon side, and renders the
// result as an aligned table.
func (cli *DockerCli) CmdTop(args ...string) error {
	cmd := cli.Subcmd("top", "CONTAINER [ps OPTIONS]", "Display the running processes of a container", true)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	val := url.Values{}
	if cmd.NArg() > 1 {
		val.Set("ps_args", strings.Join(cmd.Args()[1:], " "))
	}

	stream, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/top?"+val.Encode(), nil, false)
	if err != nil {
		return err
	}
	var procs engine.Env
	if err := procs.Decode(stream); err != nil {
		return err
	}

	var rows [][]string
	if err := procs.GetJson("Processes", &rows); err != nil {
		return err
	}

	tw := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	fmt.Fprintln(tw, strings.Join(procs.GetList("Titles"), "\t"))
	for _, row := range rows {
		fmt.Fprintln(tw, strings.Join(row, "\t"))
	}
	tw.Flush()
	return nil
}
// CmdPort prints a container's port mappings. Given a PRIVATE_PORT
// (optionally with /PROTO, defaulting to tcp) it prints only the host
// endpoints NAT-ed to that port; otherwise it lists every mapping.
func (cli *DockerCli) CmdPort(args ...string) error {
	cmd := cli.Subcmd("port", "CONTAINER [PRIVATE_PORT[/PROTO]]", "List port mappings for the CONTAINER, or lookup the public-facing port that\nis NAT-ed to the PRIVATE_PORT", true)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	stream, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/json", nil, false)
	if err != nil {
		return err
	}
	env := engine.Env{}
	if err := env.Decode(stream); err != nil {
		return err
	}
	ports := nat.PortMap{}
	if err := env.GetSubEnv("NetworkSettings").GetJson("Ports", &ports); err != nil {
		return err
	}

	if cmd.NArg() == 2 {
		port, proto := cmd.Arg(1), "tcp"
		if parts := strings.SplitN(port, "/", 2); len(parts) == 2 && len(parts[1]) != 0 {
			port, proto = parts[0], parts[1]
		}
		natPort := port + "/" + proto
		if frontends, exists := ports[nat.Port(natPort)]; exists && frontends != nil {
			for _, frontend := range frontends {
				fmt.Fprintf(cli.out, "%s:%s\n", frontend.HostIp, frontend.HostPort)
			}
			return nil
		}
		return fmt.Errorf("Error: No public port '%s' published for %s", natPort, cmd.Arg(0))
	}

	for from, frontends := range ports {
		for _, frontend := range frontends {
			fmt.Fprintf(cli.out, "%s -> %s:%s\n", from, frontend.HostIp, frontend.HostPort)
		}
	}
	return nil
}
// 'docker rmi IMAGE' removes all images with the name IMAGE
func (cli *DockerCli) CmdRmi(args ...string) error {
var (
cmd = cli.Subcmd("rmi", "IMAGE [IMAGE...]", "Remove one or more images", true)
force = cmd.Bool([]string{"f", "-force"}, false, "Force removal of the image")
noprune = cmd.Bool([]string{"-no-prune"}, false, "Do not delete untagged parents")
)
cmd.Require(flag.Min, 1)
utils.ParseFlags(cmd, args, true)
v := url.Values{}
if *force {
v.Set("force", "1")
}
if *noprune {
v.Set("noprune", "1")
}
var encounteredError error
for _, name := range cmd.Args() {
body, _, err := readBody(cli.call("DELETE", "/images/"+name+"?"+v.Encode(), nil, false))
if err != nil {
fmt.Fprintf(cli.err, "%s\n", err)
encounteredError = fmt.Errorf("Error: failed to remove one or more images")
} else {
outs := engine.NewTable("Created", 0)
if _, err := outs.ReadListFrom(body); err != nil {
fmt.Fprintf(cli.err, "%s\n", err)
encounteredError = fmt.Errorf("Error: failed to remove one or more images")
continue
}
for _, out := range outs.Data {
if out.Get("Deleted") != "" {
fmt.Fprintf(cli.out, "Deleted: %s\n", out.Get("Deleted"))
} else {
fmt.Fprintf(cli.out, "Untagged: %s\n", out.Get("Untagged"))
}
}
}
}
return encounteredError
}
// CmdHistory prints an image's layer history: ID, age, creating command
// and size, one tab-aligned row per layer. With -q only IDs are printed.
func (cli *DockerCli) CmdHistory(args ...string) error {
	cmd := cli.Subcmd("history", "IMAGE", "Show the history of an image", true)
	quiet := cmd.Bool([]string{"q", "-quiet"}, false, "Only show numeric IDs")
	noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	body, _, err := readBody(cli.call("GET", "/images/"+cmd.Arg(0)+"/history", nil, false))
	if err != nil {
		return err
	}
	outs := engine.NewTable("Created", 0)
	if _, err := outs.ReadListFrom(body); err != nil {
		return err
	}
	// Tab-aligned output; the header row is suppressed in quiet mode.
	w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	if !*quiet {
		fmt.Fprintln(w, "IMAGE\tCREATED\tCREATED BY\tSIZE")
	}
	for _, out := range outs.Data {
		outID := out.Get("Id")
		if !*quiet {
			if *noTrunc {
				fmt.Fprintf(w, "%s\t", outID)
			} else {
				fmt.Fprintf(w, "%s\t", common.TruncateID(outID))
			}
			fmt.Fprintf(w, "%s ago\t", units.HumanDuration(time.Now().UTC().Sub(time.Unix(out.GetInt64("Created"), 0))))
			if *noTrunc {
				fmt.Fprintf(w, "%s\t", out.Get("CreatedBy"))
			} else {
				// Shorten long creating commands so rows stay readable.
				fmt.Fprintf(w, "%s\t", utils.Trunc(out.Get("CreatedBy"), 45))
			}
			fmt.Fprintf(w, "%s\n", units.HumanSize(float64(out.GetInt64("Size"))))
		} else {
			// quiet mode: IDs only.
			if *noTrunc {
				fmt.Fprintln(w, outID)
			} else {
				fmt.Fprintln(w, common.TruncateID(outID))
			}
		}
	}
	w.Flush()
	return nil
}
// CmdRm removes one or more containers. -v also removes associated
// volumes, -l removes the specified link, and -f force-removes a running
// container (via SIGKILL on the daemon side).
func (cli *DockerCli) CmdRm(args ...string) error {
	cmd := cli.Subcmd("rm", "CONTAINER [CONTAINER...]", "Remove one or more containers", true)
	volumes := cmd.Bool([]string{"v", "-volumes"}, false, "Remove the volumes associated with the container")
	link := cmd.Bool([]string{"l", "#link", "-link"}, false, "Remove the specified link")
	force := cmd.Bool([]string{"f", "-force"}, false, "Force the removal of a running container (uses SIGKILL)")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	query := url.Values{}
	if *volumes {
		query.Set("v", "1")
	}
	if *link {
		query.Set("link", "1")
	}
	if *force {
		query.Set("force", "1")
	}

	// Attempt every removal; report failures individually and return one
	// aggregate error at the end.
	var failure error
	for _, name := range cmd.Args() {
		if _, _, err := readBody(cli.call("DELETE", "/containers/"+name+"?"+query.Encode(), nil, false)); err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			failure = fmt.Errorf("Error: failed to remove one or more containers")
			continue
		}
		fmt.Fprintf(cli.out, "%s\n", name)
	}
	return failure
}
// 'docker kill NAME' kills a running container
// Sends SIGKILL by default, or the signal named by -s/--signal, to every
// listed container. Per-container failures go to stderr; a single
// aggregate error is returned after all containers are processed.
func (cli *DockerCli) CmdKill(args ...string) error {
	cmd := cli.Subcmd("kill", "CONTAINER [CONTAINER...]", "Kill a running container using SIGKILL or a specified signal", true)
	signal := cmd.String([]string{"s", "-signal"}, "KILL", "Signal to send to the container")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)
	var encounteredError error
	for _, name := range cmd.Args() {
		if _, _, err := readBody(cli.call("POST", fmt.Sprintf("/containers/%s/kill?signal=%s", name, *signal), nil, false)); err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to kill one or more containers")
		} else {
			// Echo the name on success, mirroring the daemon CLI convention.
			fmt.Fprintf(cli.out, "%s\n", name)
		}
	}
	return encounteredError
}
// CmdImport creates an image from a tarball fetched from URL (or streamed
// from STDIN when the source is "-"), optionally tagging the result.
func (cli *DockerCli) CmdImport(args ...string) error {
	cmd := cli.Subcmd("import", "URL|- [REPOSITORY[:TAG]]", "Create an empty filesystem image and import the contents of the\ntarball (.tar, .tar.gz, .tgz, .bzip, .tar.xz, .txz) into it, then\noptionally tag it.", true)
	flChanges := opts.NewListOpts(nil)
	cmd.Var(&flChanges, []string{"c", "-change"}, "Apply Dockerfile instruction to the created image")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)
	var (
		v          = url.Values{}
		src        = cmd.Arg(0)
		repository = cmd.Arg(1)
	)
	v.Set("fromSrc", src)
	v.Set("repo", repository)
	for _, change := range flChanges.GetAll() {
		v.Add("changes", change)
	}
	// Legacy three-argument form: URL REPOSITORY TAG.
	if cmd.NArg() == 3 {
		fmt.Fprintf(cli.err, "[DEPRECATED] The format 'URL|- [REPOSITORY [TAG]]' has been deprecated. Please use URL|- [REPOSITORY[:TAG]]\n")
		v.Set("tag", cmd.Arg(2))
	}
	if repository != "" {
		//Check if the given image name can be resolved
		repo, _ := parsers.ParseRepositoryTag(repository)
		if err := registry.ValidateRepositoryName(repo); err != nil {
			return err
		}
	}
	// "-" means the tarball arrives on STDIN; otherwise the daemon fetches
	// the URL itself and `in` stays nil.
	var in io.Reader
	if src == "-" {
		in = cli.in
	}
	return cli.stream("POST", "/images/create?"+v.Encode(), in, cli.out, nil)
}
// CmdPush pushes an image or repository to its registry. On a 401
// response it prompts for login and retries once with the refreshed
// credentials.
func (cli *DockerCli) CmdPush(args ...string) error {
	cmd := cli.Subcmd("push", "NAME[:TAG]", "Push an image or a repository to the registry", true)
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	name := cmd.Arg(0)
	cli.LoadConfigFile()
	remote, tag := parsers.ParseRepositoryTag(name)
	// Resolve the Repository name from fqn to RepositoryInfo
	repoInfo, err := registry.ParseRepositoryInfo(remote)
	if err != nil {
		return err
	}
	// Resolve the Auth config relevant for this server
	authConfig := cli.configFile.ResolveAuthConfig(repoInfo.Index)
	// If we're not using a custom registry, we know the restrictions
	// applied to repository names and can warn the user in advance.
	// Custom repositories can have different rules, and we must also
	// allow pushing by image ID.
	if repoInfo.Official {
		username := authConfig.Username
		if username == "" {
			username = "<user>"
		}
		return fmt.Errorf("You cannot push a \"root\" repository. Please rename your repository to <user>/<repo> (ex: %s/%s)", username, repoInfo.LocalName)
	}
	v := url.Values{}
	v.Set("tag", tag)
	// push streams the daemon's progress output, passing the credentials
	// base64-encoded in the X-Registry-Auth header.
	push := func(authConfig registry.AuthConfig) error {
		buf, err := json.Marshal(authConfig)
		if err != nil {
			return err
		}
		registryAuthHeader := []string{
			base64.URLEncoding.EncodeToString(buf),
		}
		return cli.stream("POST", "/images/"+remote+"/push?"+v.Encode(), nil, cli.out, map[string][]string{
			"X-Registry-Auth": registryAuthHeader,
		})
	}
	if err := push(authConfig); err != nil {
		// Auth failure: log in interactively, then retry exactly once.
		if strings.Contains(err.Error(), "Status 401") {
			fmt.Fprintln(cli.out, "\nPlease login prior to push:")
			if err := cli.CmdLogin(repoInfo.Index.GetAuthConfigKey()); err != nil {
				return err
			}
			authConfig := cli.configFile.ResolveAuthConfig(repoInfo.Index)
			return push(authConfig)
		}
		return err
	}
	return nil
}
// CmdPull pulls an image or repository. Without an explicit tag it pulls
// only "latest" unless -a/--all-tags is given; on a 401 response it
// prompts for login and retries once.
func (cli *DockerCli) CmdPull(args ...string) error {
	cmd := cli.Subcmd("pull", "NAME[:TAG|@DIGEST]", "Pull an image or a repository from the registry", true)
	allTags := cmd.Bool([]string{"a", "-all-tags"}, false, "Download all tagged images in the repository")
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	var (
		v         = url.Values{}
		remote    = cmd.Arg(0)
		newRemote = remote
	)
	taglessRemote, tag := parsers.ParseRepositoryTag(remote)
	if tag == "" && !*allTags {
		// No tag given and not pulling everything: default to "latest".
		newRemote = utils.ImageReference(taglessRemote, graph.DEFAULTTAG)
	}
	if tag != "" && *allTags {
		return fmt.Errorf("tag can't be used with --all-tags/-a")
	}
	v.Set("fromImage", newRemote)
	// Resolve the Repository name from fqn to RepositoryInfo
	repoInfo, err := registry.ParseRepositoryInfo(taglessRemote)
	if err != nil {
		return err
	}
	cli.LoadConfigFile()
	// Resolve the Auth config relevant for this server
	authConfig := cli.configFile.ResolveAuthConfig(repoInfo.Index)
	// pull streams the daemon's progress output, passing the credentials
	// base64-encoded in the X-Registry-Auth header.
	pull := func(authConfig registry.AuthConfig) error {
		buf, err := json.Marshal(authConfig)
		if err != nil {
			return err
		}
		registryAuthHeader := []string{
			base64.URLEncoding.EncodeToString(buf),
		}
		return cli.stream("POST", "/images/create?"+v.Encode(), nil, cli.out, map[string][]string{
			"X-Registry-Auth": registryAuthHeader,
		})
	}
	if err := pull(authConfig); err != nil {
		// Auth failure: log in interactively, then retry exactly once.
		if strings.Contains(err.Error(), "Status 401") {
			fmt.Fprintln(cli.out, "\nPlease login prior to pull:")
			if err := cli.CmdLogin(repoInfo.Index.GetAuthConfigKey()); err != nil {
				return err
			}
			authConfig := cli.configFile.ResolveAuthConfig(repoInfo.Index)
			return pull(authConfig)
		}
		return err
	}
	return nil
}
// CmdImages lists images. The default mode renders a tab-aligned table
// (REPOSITORY/TAG/[DIGEST]/IMAGE ID/CREATED/VIRTUAL SIZE); the deprecated
// --viz/--tree flags instead walk the parent/child graph and render it as
// graphviz or an ASCII tree.
func (cli *DockerCli) CmdImages(args ...string) error {
	cmd := cli.Subcmd("images", "[REPOSITORY]", "List images", true)
	quiet := cmd.Bool([]string{"q", "-quiet"}, false, "Only show numeric IDs")
	all := cmd.Bool([]string{"a", "-all"}, false, "Show all images (default hides intermediate images)")
	noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
	showDigests := cmd.Bool([]string{"-digests"}, false, "Show digests")
	// FIXME: --viz and --tree are deprecated. Remove them in a future version.
	flViz := cmd.Bool([]string{"#v", "#viz", "#-viz"}, false, "Output graph in graphviz format")
	flTree := cmd.Bool([]string{"#t", "#tree", "#-tree"}, false, "Output graph in tree format")
	flFilter := opts.NewListOpts(nil)
	cmd.Var(&flFilter, []string{"f", "-filter"}, "Filter output based on conditions provided")
	cmd.Require(flag.Max, 1)
	utils.ParseFlags(cmd, args, true)
	// Consolidate all filter flags, and sanity check them early.
	// They'll get process in the daemon/server.
	imageFilterArgs := filters.Args{}
	for _, f := range flFilter.GetAll() {
		var err error
		imageFilterArgs, err = filters.ParseFlag(f, imageFilterArgs)
		if err != nil {
			return err
		}
	}
	matchName := cmd.Arg(0)
	// FIXME: --viz and --tree are deprecated. Remove them in a future version.
	if *flViz || *flTree {
		// Graph mode always fetches the full list (all=1) to build the tree.
		v := url.Values{
			"all": []string{"1"},
		}
		if len(imageFilterArgs) > 0 {
			filterJson, err := filters.ToParam(imageFilterArgs)
			if err != nil {
				return err
			}
			v.Set("filters", filterJson)
		}
		body, _, err := readBody(cli.call("GET", "/images/json?"+v.Encode(), nil, false))
		if err != nil {
			return err
		}
		outs := engine.NewTable("Created", 0)
		if _, err := outs.ReadListFrom(body); err != nil {
			return err
		}
		var (
			printNode  func(cli *DockerCli, noTrunc bool, image *engine.Env, prefix string)
			startImage *engine.Env
			roots      = engine.NewTable("Created", outs.Len())
			byParent   = make(map[string]*engine.Table)
		)
		// Partition images into roots (no parent) and a parent->children
		// index used by WalkTree.
		for _, image := range outs.Data {
			if image.Get("ParentId") == "" {
				roots.Add(image)
			} else {
				if children, exists := byParent[image.Get("ParentId")]; exists {
					children.Add(image)
				} else {
					byParent[image.Get("ParentId")] = engine.NewTable("Created", 1)
					byParent[image.Get("ParentId")].Add(image)
				}
			}
			// Remember the image matching an explicit name/ID argument so
			// only its subtree is printed.
			if matchName != "" {
				if matchName == image.Get("Id") || matchName == common.TruncateID(image.Get("Id")) {
					startImage = image
				}
				for _, repotag := range image.GetList("RepoTags") {
					if repotag == matchName {
						startImage = image
					}
				}
			}
		}
		if *flViz {
			fmt.Fprintf(cli.out, "digraph docker {\n")
			printNode = (*DockerCli).printVizNode
		} else {
			printNode = (*DockerCli).printTreeNode
		}
		if startImage != nil {
			root := engine.NewTable("Created", 1)
			root.Add(startImage)
			cli.WalkTree(*noTrunc, root, byParent, "", printNode)
		} else if matchName == "" {
			cli.WalkTree(*noTrunc, roots, byParent, "", printNode)
		}
		if *flViz {
			fmt.Fprintf(cli.out, " base [style=invisible]\n}\n")
		}
	} else {
		v := url.Values{}
		if len(imageFilterArgs) > 0 {
			filterJson, err := filters.ToParam(imageFilterArgs)
			if err != nil {
				return err
			}
			v.Set("filters", filterJson)
		}
		if cmd.NArg() == 1 {
			// FIXME rename this parameter, to not be confused with the filters flag
			v.Set("filter", matchName)
		}
		if *all {
			v.Set("all", "1")
		}
		body, _, err := readBody(cli.call("GET", "/images/json?"+v.Encode(), nil, false))
		if err != nil {
			return err
		}
		outs := engine.NewTable("Created", 0)
		if _, err := outs.ReadListFrom(body); err != nil {
			return err
		}
		w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
		if !*quiet {
			if *showDigests {
				fmt.Fprintln(w, "REPOSITORY\tTAG\tDIGEST\tIMAGE ID\tCREATED\tVIRTUAL SIZE")
			} else {
				fmt.Fprintln(w, "REPOSITORY\tTAG\tIMAGE ID\tCREATED\tVIRTUAL SIZE")
			}
		}
		for _, out := range outs.Data {
			outID := out.Get("Id")
			if !*noTrunc {
				outID = common.TruncateID(outID)
			}
			repoTags := out.GetList("RepoTags")
			repoDigests := out.GetList("RepoDigests")
			if len(repoTags) == 1 && repoTags[0] == "<none>:<none>" && len(repoDigests) == 1 && repoDigests[0] == "<none>@<none>" {
				// dangling image - clear out either repoTags or repoDigsts so we only show it once below
				repoDigests = []string{}
			}
			// combine the tags and digests lists
			tagsAndDigests := append(repoTags, repoDigests...)
			for _, repoAndRef := range tagsAndDigests {
				repo, ref := parsers.ParseRepositoryTag(repoAndRef)
				// default tag and digest to none - if there's a value, it'll be set below
				tag := "<none>"
				digest := "<none>"
				if utils.DigestReference(ref) {
					digest = ref
				} else {
					tag = ref
				}
				if !*quiet {
					if *showDigests {
						fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s ago\t%s\n", repo, tag, digest, outID, units.HumanDuration(time.Now().UTC().Sub(time.Unix(out.GetInt64("Created"), 0))), units.HumanSize(float64(out.GetInt64("VirtualSize"))))
					} else {
						fmt.Fprintf(w, "%s\t%s\t%s\t%s ago\t%s\n", repo, tag, outID, units.HumanDuration(time.Now().UTC().Sub(time.Unix(out.GetInt64("Created"), 0))), units.HumanSize(float64(out.GetInt64("VirtualSize"))))
					}
				} else {
					fmt.Fprintln(w, outID)
				}
			}
		}
		if !*quiet {
			w.Flush()
		}
	}
	return nil
}
// FIXME: --viz and --tree are deprecated. Remove them in a future version.
// WalkTree prints every image in images, recursing through byParent to
// reach children. noTrunc controls ID truncation and printNode renders a
// single node (viz or tree style).
//
// NOTE: the previous implementation special-cased tables of length <= 1,
// but that branch produced exactly the same output as the
// index+1 == length branch below, so the duplicate path has been removed.
func (cli *DockerCli) WalkTree(noTrunc bool, images *engine.Table, byParent map[string]*engine.Table, prefix string, printNode func(cli *DockerCli, noTrunc bool, image *engine.Env, prefix string)) {
	length := images.Len()
	for index, image := range images.Data {
		if index+1 == length {
			// Last sibling: closing corner, children indented with spaces.
			printNode(cli, noTrunc, image, prefix+"└─")
			if subimages, exists := byParent[image.Get("Id")]; exists {
				cli.WalkTree(noTrunc, subimages, byParent, prefix+" ", printNode)
			}
		} else {
			// Earlier sibling: tee, children carry a continuation bar.
			printNode(cli, noTrunc, image, prefix+"\u251C─")
			if subimages, exists := byParent[image.Get("Id")]; exists {
				cli.WalkTree(noTrunc, subimages, byParent, prefix+"\u2502 ", printNode)
			}
		}
	}
}
// FIXME: --viz and --tree are deprecated. Remove them in a future version.
// printVizNode emits one image as a graphviz edge (parent -> child, or
// base -> child for roots) plus, for tagged images, a labeled node.
func (cli *DockerCli) printVizNode(noTrunc bool, image *engine.Env, prefix string) {
	var (
		imageID  string
		parentID string
	)
	if noTrunc {
		imageID = image.Get("Id")
		parentID = image.Get("ParentId")
	} else {
		imageID = common.TruncateID(image.Get("Id"))
		parentID = common.TruncateID(image.Get("ParentId"))
	}
	if parentID == "" {
		fmt.Fprintf(cli.out, " base -> \"%s\" [style=invis]\n", imageID)
	} else {
		fmt.Fprintf(cli.out, " \"%s\" -> \"%s\"\n", parentID, imageID)
	}
	// Guard the tag lookup: indexing RepoTags[0] unconditionally would
	// panic on an image whose tag list is empty.
	if repoTags := image.GetList("RepoTags"); len(repoTags) > 0 && repoTags[0] != "<none>:<none>" {
		fmt.Fprintf(cli.out, " \"%s\" [label=\"%s\\n%s\",shape=box,fillcolor=\"paleturquoise\",style=\"filled,rounded\"];\n",
			imageID, imageID, strings.Join(repoTags, "\\n"))
	}
}
// FIXME: --viz and --tree are deprecated. Remove them in a future version.
// printTreeNode prints one image as a tree row: prefix, ID, virtual size
// and, when present, the image's tags.
func (cli *DockerCli) printTreeNode(noTrunc bool, image *engine.Env, prefix string) {
	var imageID string
	if noTrunc {
		imageID = image.Get("Id")
	} else {
		imageID = common.TruncateID(image.Get("Id"))
	}
	fmt.Fprintf(cli.out, "%s%s Virtual Size: %s", prefix, imageID, units.HumanSize(float64(image.GetInt64("VirtualSize"))))
	// Guard the tag lookup: indexing RepoTags[0] unconditionally would
	// panic on an image whose tag list is empty; treat that case as untagged.
	if repoTags := image.GetList("RepoTags"); len(repoTags) > 0 && repoTags[0] != "<none>:<none>" {
		fmt.Fprintf(cli.out, " Tags: %s\n", strings.Join(repoTags, ", "))
	} else {
		fmt.Fprint(cli.out, "\n")
	}
}
// CmdPs lists containers in a tab-aligned table. By default only running
// containers are shown; -a, -l, -n, --since and --before widen the
// selection, --filter conditions are forwarded to the daemon, and -q
// restricts output to IDs only.
func (cli *DockerCli) CmdPs(args ...string) error {
	var (
		err error
		psFilterArgs = filters.Args{}
		v            = url.Values{}
		cmd      = cli.Subcmd("ps", "", "List containers", true)
		quiet    = cmd.Bool([]string{"q", "-quiet"}, false, "Only display numeric IDs")
		size     = cmd.Bool([]string{"s", "-size"}, false, "Display total file sizes")
		all      = cmd.Bool([]string{"a", "-all"}, false, "Show all containers (default shows just running)")
		noTrunc  = cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
		nLatest  = cmd.Bool([]string{"l", "-latest"}, false, "Show the latest created container, include non-running")
		since    = cmd.String([]string{"#sinceId", "#-since-id", "-since"}, "", "Show created since Id or Name, include non-running")
		before   = cmd.String([]string{"#beforeId", "#-before-id", "-before"}, "", "Show only container created before Id or Name")
		last     = cmd.Int([]string{"n"}, -1, "Show n last created containers, include non-running")
		flFilter = opts.NewListOpts(nil)
	)
	cmd.Require(flag.Exact, 0)
	cmd.Var(&flFilter, []string{"f", "-filter"}, "Filter output based on conditions provided")
	utils.ParseFlags(cmd, args, true)
	// -l is shorthand for -n 1.
	if *last == -1 && *nLatest {
		*last = 1
	}
	if *all {
		v.Set("all", "1")
	}
	if *last != -1 {
		v.Set("limit", strconv.Itoa(*last))
	}
	if *since != "" {
		v.Set("since", *since)
	}
	if *before != "" {
		v.Set("before", *before)
	}
	if *size {
		v.Set("size", "1")
	}
	// Consolidate all filter flags, and sanity check them.
	// They'll get processed in the daemon/server.
	for _, f := range flFilter.GetAll() {
		if psFilterArgs, err = filters.ParseFlag(f, psFilterArgs); err != nil {
			return err
		}
	}
	if len(psFilterArgs) > 0 {
		filterJson, err := filters.ToParam(psFilterArgs)
		if err != nil {
			return err
		}
		v.Set("filters", filterJson)
	}
	body, _, err := readBody(cli.call("GET", "/containers/json?"+v.Encode(), nil, false))
	if err != nil {
		return err
	}
	outs := engine.NewTable("Created", 0)
	if _, err := outs.ReadListFrom(body); err != nil {
		return err
	}
	w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	if !*quiet {
		fmt.Fprint(w, "CONTAINER ID\tIMAGE\tCOMMAND\tCREATED\tSTATUS\tPORTS\tNAMES")
		// The SIZE column only exists when -s was passed.
		if *size {
			fmt.Fprintln(w, "\tSIZE")
		} else {
			fmt.Fprint(w, "\n")
		}
	}
	// Container names arrive from the API with a leading '/'; strip it.
	stripNamePrefix := func(ss []string) []string {
		for i, s := range ss {
			ss[i] = s[1:]
		}
		return ss
	}
	for _, out := range outs.Data {
		outID := out.Get("Id")
		if !*noTrunc {
			outID = common.TruncateID(outID)
		}
		if *quiet {
			fmt.Fprintln(w, outID)
			continue
		}
		var (
			outNames   = stripNamePrefix(out.GetList("Names"))
			outCommand = strconv.Quote(out.Get("Command"))
			ports      = engine.NewTable("", 0)
		)
		if !*noTrunc {
			outCommand = utils.Trunc(outCommand, 20)
			// only display the default name for the container with notrunc is passed
			for _, name := range outNames {
				if len(strings.Split(name, "/")) == 1 {
					outNames = []string{name}
					break
				}
			}
		}
		ports.ReadListFrom([]byte(out.Get("Ports")))
		image := out.Get("Image")
		if image == "" {
			image = "<no image>"
		}
		fmt.Fprintf(w, "%s\t%s\t%s\t%s ago\t%s\t%s\t%s\t", outID, image, outCommand,
			units.HumanDuration(time.Now().UTC().Sub(time.Unix(out.GetInt64("Created"), 0))),
			out.Get("Status"), api.DisplayablePorts(ports), strings.Join(outNames, ","))
		if *size {
			if out.GetInt("SizeRootFs") > 0 {
				fmt.Fprintf(w, "%s (virtual %s)\n", units.HumanSize(float64(out.GetInt64("SizeRw"))), units.HumanSize(float64(out.GetInt64("SizeRootFs"))))
			} else {
				fmt.Fprintf(w, "%s\n", units.HumanSize(float64(out.GetInt64("SizeRw"))))
			}
			continue
		}
		fmt.Fprint(w, "\n")
	}
	if !*quiet {
		w.Flush()
	}
	return nil
}
// CmdCommit creates a new image from a container's changes and prints the
// new image ID. -m/-a set commit metadata, -c applies Dockerfile
// instructions, and the deprecated --run supplies a JSON runconfig.
func (cli *DockerCli) CmdCommit(args ...string) error {
	cmd := cli.Subcmd("commit", "CONTAINER [REPOSITORY[:TAG]]", "Create a new image from a container's changes", true)
	flPause := cmd.Bool([]string{"p", "-pause"}, true, "Pause container during commit")
	flComment := cmd.String([]string{"m", "-message"}, "", "Commit message")
	flAuthor := cmd.String([]string{"a", "#author", "-author"}, "", "Author (e.g., \"John Hannibal Smith <[email protected]>\")")
	flChanges := opts.NewListOpts(nil)
	cmd.Var(&flChanges, []string{"c", "-change"}, "Apply Dockerfile instruction to the created image")
	// FIXME: --run is deprecated, it will be replaced with inline Dockerfile commands.
	flConfig := cmd.String([]string{"#run", "#-run"}, "", "This option is deprecated and will be removed in a future version in favor of inline Dockerfile-compatible commands")
	cmd.Require(flag.Max, 2)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)
	var (
		name            = cmd.Arg(0)
		repository, tag = parsers.ParseRepositoryTag(cmd.Arg(1))
	)
	//Check if the given image name can be resolved
	if repository != "" {
		if err := registry.ValidateRepositoryName(repository); err != nil {
			return err
		}
	}
	v := url.Values{}
	v.Set("container", name)
	v.Set("repo", repository)
	v.Set("tag", tag)
	v.Set("comment", *flComment)
	v.Set("author", *flAuthor)
	for _, change := range flChanges.GetAll() {
		v.Add("changes", change)
	}
	// The daemon pauses by default, so only pause=0 needs to be sent.
	// (Was `*flPause != true`; `!*flPause` is the idiomatic form.)
	if !*flPause {
		v.Set("pause", "0")
	}
	var (
		config *runconfig.Config
		env    engine.Env
	)
	if *flConfig != "" {
		config = &runconfig.Config{}
		if err := json.Unmarshal([]byte(*flConfig), config); err != nil {
			return err
		}
	}
	stream, _, err := cli.call("POST", "/commit?"+v.Encode(), config, false)
	if err != nil {
		return err
	}
	if err := env.Decode(stream); err != nil {
		return err
	}
	fmt.Fprintf(cli.out, "%s\n", env.Get("Id"))
	return nil
}
// CmdEvents streams real-time events from the daemon, optionally bounded
// by --since/--until timestamps and narrowed by --filter conditions.
func (cli *DockerCli) CmdEvents(args ...string) error {
	cmd := cli.Subcmd("events", "", "Get real time events from the server", true)
	since := cmd.String([]string{"#since", "-since"}, "", "Show all events created since timestamp")
	until := cmd.String([]string{"-until"}, "", "Stream events until this timestamp")
	flFilter := opts.NewListOpts(nil)
	cmd.Var(&flFilter, []string{"f", "-filter"}, "Filter output based on conditions provided")
	cmd.Require(flag.Exact, 0)
	utils.ParseFlags(cmd, args, true)
	var (
		v = url.Values{}
		// Local zone used to interpret user timestamps without zone info.
		loc             = time.FixedZone(time.Now().Zone())
		eventFilterArgs = filters.Args{}
	)
	// Consolidate all filter flags, and sanity check them early.
	// They'll get process in the daemon/server.
	for _, f := range flFilter.GetAll() {
		var err error
		eventFilterArgs, err = filters.ParseFlag(f, eventFilterArgs)
		if err != nil {
			return err
		}
	}
	// setTime converts a parseable timestamp to Unix seconds; any other
	// value (e.g. an already-numeric timestamp) is forwarded verbatim.
	var setTime = func(key, value string) {
		format := timeutils.RFC3339NanoFixed
		if len(value) < len(format) {
			// Accept prefixes of the full format (date only, no nanos, ...).
			format = format[:len(value)]
		}
		if t, err := time.ParseInLocation(format, value, loc); err == nil {
			v.Set(key, strconv.FormatInt(t.Unix(), 10))
		} else {
			v.Set(key, value)
		}
	}
	if *since != "" {
		setTime("since", *since)
	}
	if *until != "" {
		setTime("until", *until)
	}
	if len(eventFilterArgs) > 0 {
		filterJson, err := filters.ToParam(eventFilterArgs)
		if err != nil {
			return err
		}
		v.Set("filters", filterJson)
	}
	if err := cli.stream("GET", "/events?"+v.Encode(), nil, cli.out, nil); err != nil {
		return err
	}
	return nil
}
// CmdExport streams a container's filesystem as a tar archive to STDOUT,
// or to the file named by -o. Writing raw tar to a terminal is refused.
func (cli *DockerCli) CmdExport(args ...string) error {
	cmd := cli.Subcmd("export", "CONTAINER", "Export a filesystem as a tar archive (streamed to STDOUT by default)", true)
	outfile := cmd.String([]string{"o", "-output"}, "", "Write to a file, instead of STDOUT")
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	var (
		output io.Writer = cli.out
		err    error
	)
	if *outfile != "" {
		output, err = os.Create(*outfile)
		if err != nil {
			return err
		}
	} else if cli.isTerminalOut {
		return errors.New("Cowardly refusing to save to a terminal. Use the -o flag or redirect.")
	}
	if len(cmd.Args()) == 1 {
		image := cmd.Arg(0)
		if err := cli.stream("GET", "/containers/"+image+"/export", nil, output, nil); err != nil {
			return err
		}
	} else {
		// NOTE(review): cmd.Require(flag.Exact, 1) above appears to make
		// this multi-name branch unreachable — confirm before removing.
		v := url.Values{}
		for _, arg := range cmd.Args() {
			v.Add("names", arg)
		}
		if err := cli.stream("GET", "/containers/get?"+v.Encode(), nil, output, nil); err != nil {
			return err
		}
	}
	return nil
}
// CmdDiff lists filesystem changes inside a container, one per line,
// prefixed with C (changed), A (added) or D (deleted).
func (cli *DockerCli) CmdDiff(args ...string) error {
	cmd := cli.Subcmd("diff", "CONTAINER", "Inspect changes on a container's filesystem", true)
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)

	body, _, err := readBody(cli.call("GET", "/containers/"+cmd.Arg(0)+"/changes", nil, false))
	if err != nil {
		return err
	}
	changes := engine.NewTable("", 0)
	if _, err := changes.ReadListFrom(body); err != nil {
		return err
	}

	for _, change := range changes.Data {
		// Translate the numeric change kind to its display letter.
		var label string
		switch change.GetInt("Kind") {
		case archive.ChangeModify:
			label = "C"
		case archive.ChangeAdd:
			label = "A"
		case archive.ChangeDelete:
			label = "D"
		}
		fmt.Fprintf(cli.out, "%s %s\n", label, change.Get("Path"))
	}
	return nil
}
// CmdLogs fetches (and with -f follows) a container's logs. The container
// is inspected first because the logs endpoint only supports the
// "json-file" logging driver and because Config.Tty selects the stream
// demultiplexing mode.
func (cli *DockerCli) CmdLogs(args ...string) error {
	var (
		cmd    = cli.Subcmd("logs", "CONTAINER", "Fetch the logs of a container", true)
		follow = cmd.Bool([]string{"f", "-follow"}, false, "Follow log output")
		times  = cmd.Bool([]string{"t", "-timestamps"}, false, "Show timestamps")
		tail   = cmd.String([]string{"-tail"}, "all", "Number of lines to show from the end of the logs")
	)
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	name := cmd.Arg(0)
	stream, _, err := cli.call("GET", "/containers/"+name+"/json", nil, false)
	if err != nil {
		return err
	}
	env := engine.Env{}
	if err := env.Decode(stream); err != nil {
		return err
	}
	if env.GetSubEnv("HostConfig").GetSubEnv("LogConfig").Get("Type") != "json-file" {
		return fmt.Errorf("\"logs\" command is supported only for \"json-file\" logging driver")
	}
	v := url.Values{}
	v.Set("stdout", "1")
	v.Set("stderr", "1")
	if *times {
		v.Set("timestamps", "1")
	}
	if *follow {
		v.Set("follow", "1")
	}
	v.Set("tail", *tail)
	return cli.streamHelper("GET", "/containers/"+name+"/logs?"+v.Encode(), env.GetSubEnv("Config").GetBool("Tty"), nil, cli.out, cli.err, nil)
}
// CmdAttach attaches the local terminal to a running container's stdio.
// Stdin is attached unless --no-stdin is set (and only if the container
// was created with OpenStdin); with --sig-proxy (default) and no TTY,
// received signals are forwarded. The container's exit status is
// propagated via utils.StatusError.
func (cli *DockerCli) CmdAttach(args ...string) error {
	var (
		cmd     = cli.Subcmd("attach", "CONTAINER", "Attach to a running container", true)
		noStdin = cmd.Bool([]string{"#nostdin", "-no-stdin"}, false, "Do not attach STDIN")
		proxy   = cmd.Bool([]string{"#sig-proxy", "-sig-proxy"}, true, "Proxy all received signals to the process")
	)
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	name := cmd.Arg(0)
	// Inspect the container first to validate its state and read config.
	stream, _, err := cli.call("GET", "/containers/"+name+"/json", nil, false)
	if err != nil {
		return err
	}
	env := engine.Env{}
	if err := env.Decode(stream); err != nil {
		return err
	}
	if !env.GetSubEnv("State").GetBool("Running") {
		return fmt.Errorf("You cannot attach to a stopped container, start it first")
	}
	var (
		config = env.GetSubEnv("Config")
		tty    = config.GetBool("Tty")
	)
	if err := cli.CheckTtyInput(!*noStdin, tty); err != nil {
		return err
	}
	if tty && cli.isTerminalOut {
		// Keep the container TTY size in sync with the local terminal;
		// failure here is logged but not fatal.
		if err := cli.monitorTtySize(cmd.Arg(0), false); err != nil {
			log.Debugf("Error monitoring TTY size: %s", err)
		}
	}
	var in io.ReadCloser
	v := url.Values{}
	v.Set("stream", "1")
	if !*noStdin && config.GetBool("OpenStdin") {
		v.Set("stdin", "1")
		in = cli.in
	}
	v.Set("stdout", "1")
	v.Set("stderr", "1")
	if *proxy && !tty {
		sigc := cli.forwardAllSignals(cmd.Arg(0))
		defer signal.StopCatch(sigc)
	}
	if err := cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), tty, in, cli.out, cli.err, nil, nil); err != nil {
		return err
	}
	// Mirror the container's exit status to our own caller.
	_, status, err := getExitCode(cli, cmd.Arg(0))
	if err != nil {
		return err
	}
	if status != 0 {
		return &utils.StatusError{StatusCode: status}
	}
	return nil
}
// CmdSearch queries the registry's /images/search endpoint for TERM and
// prints name, description, star count and official/automated markers.
// The --stars and --automated/--trusted filters are applied client-side.
func (cli *DockerCli) CmdSearch(args ...string) error {
	cmd := cli.Subcmd("search", "TERM", "Search the Docker Hub for images", true)
	noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
	trusted := cmd.Bool([]string{"#t", "#trusted", "#-trusted"}, false, "Only show trusted builds")
	automated := cmd.Bool([]string{"-automated"}, false, "Only show automated builds")
	stars := cmd.Int([]string{"s", "#stars", "-stars"}, 0, "Only displays with at least x stars")
	cmd.Require(flag.Exact, 1)
	utils.ParseFlags(cmd, args, true)
	v := url.Values{}
	v.Set("term", cmd.Arg(0))
	body, _, err := readBody(cli.call("GET", "/images/search?"+v.Encode(), nil, true))
	if err != nil {
		return err
	}
	outs := engine.NewTable("star_count", 0)
	if _, err := outs.ReadListFrom(body); err != nil {
		return err
	}
	w := tabwriter.NewWriter(cli.out, 10, 1, 3, ' ', 0)
	fmt.Fprintf(w, "NAME\tDESCRIPTION\tSTARS\tOFFICIAL\tAUTOMATED\n")
	for _, out := range outs.Data {
		// Skip results excluded by --automated/--trusted or --stars.
		if ((*automated || *trusted) && (!out.GetBool("is_trusted") && !out.GetBool("is_automated"))) || (*stars > out.GetInt("star_count")) {
			continue
		}
		// Collapse multi-line descriptions to a single display line.
		desc := strings.Replace(out.Get("description"), "\n", " ", -1)
		desc = strings.Replace(desc, "\r", " ", -1)
		if !*noTrunc && len(desc) > 45 {
			desc = utils.Trunc(desc, 42) + "..."
		}
		fmt.Fprintf(w, "%s\t%s\t%d\t", out.Get("name"), desc, out.GetInt("star_count"))
		if out.GetBool("is_official") {
			fmt.Fprint(w, "[OK]")
		}
		fmt.Fprint(w, "\t")
		if out.GetBool("is_automated") || out.GetBool("is_trusted") {
			fmt.Fprint(w, "[OK]")
		}
		fmt.Fprint(w, "\n")
	}
	w.Flush()
	return nil
}
// Ports type - Used to parse multiple -p flags
// NOTE(review): this type is not referenced anywhere in this chunk of the
// file; presumably legacy — confirm against the rest of the file before
// removing.
type ports []int
// CmdTag applies REPOSITORY[:TAG] (second argument) to an existing image
// (first argument), optionally forcing replacement of an existing tag.
func (cli *DockerCli) CmdTag(args ...string) error {
	cmd := cli.Subcmd("tag", "IMAGE[:TAG] [REGISTRYHOST/][USERNAME/]NAME[:TAG]", "Tag an image into a repository", true)
	force := cmd.Bool([]string{"f", "#force", "-force"}, false, "Force")
	cmd.Require(flag.Exact, 2)
	utils.ParseFlags(cmd, args, true)

	repository, tag := parsers.ParseRepositoryTag(cmd.Arg(1))

	// Check that the target name resolves before contacting the daemon.
	if err := registry.ValidateRepositoryName(repository); err != nil {
		return err
	}

	query := url.Values{}
	query.Set("repo", repository)
	query.Set("tag", tag)
	if *force {
		query.Set("force", "1")
	}

	_, _, err := readBody(cli.call("POST", "/images/"+cmd.Arg(0)+"/tag?"+query.Encode(), nil, false))
	return err
}
// pullImage pulls image from the registry, streaming progress to cli.out.
func (cli *DockerCli) pullImage(image string) error {
	return cli.pullImageCustomOut(image, cli.out)
}
// pullImageCustomOut pulls image from the registry, writing the daemon's
// progress stream to out (e.g. stderr, so stdout can stay machine-readable).
func (cli *DockerCli) pullImageCustomOut(image string, out io.Writer) error {
	repos, tag := parsers.ParseRepositoryTag(image)
	if tag == "" {
		// pull only the image tagged 'latest' if no tag was specified
		tag = graph.DEFAULTTAG
	}

	query := url.Values{}
	query.Set("fromImage", repos)
	query.Set("tag", tag)

	// Resolve the Repository name from fqn to RepositoryInfo
	repoInfo, err := registry.ParseRepositoryInfo(repos)
	if err != nil {
		return err
	}

	// Load the auth config file, to be able to pull the image
	cli.LoadConfigFile()

	// Resolve the Auth config relevant for this server and pass it
	// base64-encoded in the X-Registry-Auth header.
	buf, err := json.Marshal(cli.configFile.ResolveAuthConfig(repoInfo.Index))
	if err != nil {
		return err
	}
	headers := map[string][]string{
		"X-Registry-Auth": {base64.URLEncoding.EncodeToString(buf)},
	}
	return cli.stream("POST", "/images/create?"+query.Encode(), nil, out, headers)
}
// cidFile tracks the file used to persist a newly created container's ID
// (the --cidfile flag).
type cidFile struct {
	path    string   // destination path of the ID file
	file    *os.File // open handle created by newCIDFile
	written bool     // true once a container ID has been written
}
// newCIDFile creates the container-ID file at path, refusing to overwrite
// an existing file (it may belong to a still-running container).
func newCIDFile(path string) (*cidFile, error) {
	if _, err := os.Stat(path); err == nil {
		return nil, fmt.Errorf("Container ID file found, make sure the other container isn't running or delete %s", path)
	}
	f, err := os.Create(path)
	if err != nil {
		return nil, fmt.Errorf("Failed to create the container ID file: %s", err)
	}
	return &cidFile{path: path, file: f}, nil
}
// Close closes the underlying file and, if no container ID was ever
// written, removes it so stale empty CID files are not left behind.
func (cid *cidFile) Close() error {
	cid.file.Close()
	if cid.written {
		return nil
	}
	if err := os.Remove(cid.path); err != nil {
		return fmt.Errorf("failed to remove the CID file '%s': %s \n", cid.path, err)
	}
	return nil
}
// Write records id into the CID file and marks it as written so that
// Close will not delete the file.
func (cid *cidFile) Write(id string) error {
	_, err := cid.file.Write([]byte(id))
	if err != nil {
		return fmt.Errorf("Failed to write the container ID to the file: %s", err)
	}
	cid.written = true
	return nil
}
// createContainer creates a container via POST /containers/create. If the
// daemon answers 404 (image not available locally), the image is pulled
// with progress on stderr — stdout must carry only the container ID — and
// the create is retried once. API warnings go to stderr and the new ID is
// optionally persisted to cidfile.
func (cli *DockerCli) createContainer(config *runconfig.Config, hostConfig *runconfig.HostConfig, cidfile, name string) (*types.ContainerCreateResponse, error) {
	containerValues := url.Values{}
	if name != "" {
		containerValues.Set("name", name)
	}
	mergedConfig := runconfig.MergeConfigs(config, hostConfig)
	var containerIDFile *cidFile
	if cidfile != "" {
		var err error
		if containerIDFile, err = newCIDFile(cidfile); err != nil {
			return nil, err
		}
		// Close removes the file again if no ID ever gets written to it.
		defer containerIDFile.Close()
	}
	//create the container
	stream, statusCode, err := cli.call("POST", "/containers/create?"+containerValues.Encode(), mergedConfig, false)
	//if image not found try to pull it
	if statusCode == 404 {
		repo, tag := parsers.ParseRepositoryTag(config.Image)
		if tag == "" {
			tag = graph.DEFAULTTAG
		}
		fmt.Fprintf(cli.err, "Unable to find image '%s' locally\n", utils.ImageReference(repo, tag))
		// we don't want to write to stdout anything apart from container.ID
		if err = cli.pullImageCustomOut(config.Image, cli.err); err != nil {
			return nil, err
		}
		// Retry
		if stream, _, err = cli.call("POST", "/containers/create?"+containerValues.Encode(), mergedConfig, false); err != nil {
			return nil, err
		}
	} else if err != nil {
		return nil, err
	}
	var response types.ContainerCreateResponse
	if err := json.NewDecoder(stream).Decode(&response); err != nil {
		return nil, err
	}
	for _, warning := range response.Warnings {
		fmt.Fprintf(cli.err, "WARNING: %s\n", warning)
	}
	if containerIDFile != nil {
		if err = containerIDFile.Write(response.ID); err != nil {
			return nil, err
		}
	}
	return &response, nil
}
// CmdCreate creates a new container from IMAGE without starting it and
// prints the new container's ID on success.
func (cli *DockerCli) CmdCreate(args ...string) error {
	cmd := cli.Subcmd("create", "IMAGE [COMMAND] [ARG...]", "Create a new container", true)
	// These are flags not stored in Config/HostConfig
	var (
		flName = cmd.String([]string{"-name"}, "", "Assign a name to the container")
	)
	config, hostConfig, cmd, err := runconfig.Parse(cmd, args)
	if err != nil {
		// NOTE(review): no return here — ReportError presumably exits (cf.
		// CmdRun's "just in case the Parse does not exit" comment); confirm.
		utils.ReportError(cmd, err.Error(), true)
	}
	if config.Image == "" {
		cmd.Usage()
		return nil
	}
	response, err := cli.createContainer(config, hostConfig, hostConfig.ContainerIDFile, *flName)
	if err != nil {
		return err
	}
	fmt.Fprintf(cli.out, "%s\n", response.ID)
	return nil
}
// CmdRun creates and starts a container in one step.
//
// Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]
//
// Depending on the flags it attaches the client's stdio to the container
// over a hijacked HTTP connection, proxies signals, monitors the TTY size,
// and optionally removes the container when it exits (--rm).
func (cli *DockerCli) CmdRun(args ...string) error {
	// FIXME: just use runconfig.Parse already
	cmd := cli.Subcmd("run", "IMAGE [COMMAND] [ARG...]", "Run a command in a new container", true)

	// These are flags not stored in Config/HostConfig
	var (
		flAutoRemove = cmd.Bool([]string{"#rm", "-rm"}, false, "Automatically remove the container when it exits")
		flDetach     = cmd.Bool([]string{"d", "-detach"}, false, "Run container in background and print container ID")
		flSigProxy   = cmd.Bool([]string{"#sig-proxy", "-sig-proxy"}, true, "Proxy received signals to the process")
		flName       = cmd.String([]string{"#name", "-name"}, "", "Assign a name to the container")
		flAttach     *opts.ListOpts

		ErrConflictAttachDetach               = fmt.Errorf("Conflicting options: -a and -d")
		ErrConflictRestartPolicyAndAutoRemove = fmt.Errorf("Conflicting options: --restart and --rm")
		ErrConflictDetachAutoRemove           = fmt.Errorf("Conflicting options: --rm and -d")
	)

	config, hostConfig, cmd, err := runconfig.Parse(cmd, args)
	// just in case the Parse does not exit
	if err != nil {
		utils.ReportError(cmd, err.Error(), true)
	}

	if len(hostConfig.Dns) > 0 {
		// check the DNS settings passed via --dns against
		// localhost regexp to warn if they are trying to
		// set a DNS to a localhost address
		for _, dnsIP := range hostConfig.Dns {
			if resolvconf.IsLocalhost(dnsIP) {
				fmt.Fprintf(cli.err, "WARNING: Localhost DNS setting (--dns=%s) may fail in containers.\n", dnsIP)
				break
			}
		}
	}
	if config.Image == "" {
		cmd.Usage()
		return nil
	}

	if !*flDetach {
		// Foreground mode: refuse to attach a TTY when stdin is not one.
		if err := cli.CheckTtyInput(config.AttachStdin, config.Tty); err != nil {
			return err
		}
	} else {
		// Detached mode is incompatible with -a and with --rm.
		if fl := cmd.Lookup("-attach"); fl != nil {
			flAttach = fl.Value.(*opts.ListOpts)
			if flAttach.Len() != 0 {
				return ErrConflictAttachDetach
			}
		}
		if *flAutoRemove {
			return ErrConflictDetachAutoRemove
		}

		config.AttachStdin = false
		config.AttachStdout = false
		config.AttachStderr = false
		config.StdinOnce = false
	}

	// Disable flSigProxy when in TTY mode
	sigProxy := *flSigProxy
	if config.Tty {
		sigProxy = false
	}

	createResponse, err := cli.createContainer(config, hostConfig, hostConfig.ContainerIDFile, *flName)
	if err != nil {
		return err
	}
	if sigProxy {
		sigc := cli.forwardAllSignals(createResponse.ID)
		defer signal.StopCatch(sigc)
	}
	var (
		waitDisplayId chan struct{}
		errCh         chan error
	)
	if !config.AttachStdout && !config.AttachStderr {
		// Make this asynchronous to allow the client to write to stdin before having to read the ID
		waitDisplayId = make(chan struct{})
		go func() {
			defer close(waitDisplayId)
			fmt.Fprintf(cli.out, "%s\n", createResponse.ID)
		}()
	}
	if *flAutoRemove && (hostConfig.RestartPolicy.Name == "always" || hostConfig.RestartPolicy.Name == "on-failure") {
		return ErrConflictRestartPolicyAndAutoRemove
	}
	// We need to instantiate the chan because the select needs it. It can
	// be closed but can't be uninitialized.
	hijacked := make(chan io.Closer)
	// Block the return until the chan gets closed
	defer func() {
		log.Debugf("End of CmdRun(), Waiting for hijack to finish.")
		if _, ok := <-hijacked; ok {
			log.Errorf("Hijack did not finish (chan still open)")
		}
	}()
	if config.AttachStdin || config.AttachStdout || config.AttachStderr {
		var (
			out, stderr io.Writer
			in          io.ReadCloser
			v           = url.Values{}
		)
		v.Set("stream", "1")

		if config.AttachStdin {
			v.Set("stdin", "1")
			in = cli.in
		}
		if config.AttachStdout {
			v.Set("stdout", "1")
			out = cli.out
		}
		if config.AttachStderr {
			v.Set("stderr", "1")
			// With a TTY both streams are multiplexed onto stdout.
			if config.Tty {
				stderr = cli.out
			} else {
				stderr = cli.err
			}
		}
		errCh = promise.Go(func() error {
			return cli.hijack("POST", "/containers/"+createResponse.ID+"/attach?"+v.Encode(), config.Tty, in, out, stderr, hijacked, nil)
		})
	} else {
		close(hijacked)
	}
	// Acknowledge the hijack before starting
	select {
	case closer := <-hijacked:
		// Make sure that the hijack gets closed when returning (results
		// in closing the hijack chan and freeing server's goroutines)
		if closer != nil {
			defer closer.Close()
		}
	case err := <-errCh:
		if err != nil {
			log.Debugf("Error hijack: %s", err)
			return err
		}
	}

	// --rm: delete the container once this function returns.
	defer func() {
		if *flAutoRemove {
			if _, _, err = readBody(cli.call("DELETE", "/containers/"+createResponse.ID+"?v=1", nil, false)); err != nil {
				log.Errorf("Error deleting container: %s", err)
			}
		}
	}()

	//start the container
	if _, _, err = readBody(cli.call("POST", "/containers/"+createResponse.ID+"/start", nil, false)); err != nil {
		return err
	}

	if (config.AttachStdin || config.AttachStdout || config.AttachStderr) && config.Tty && cli.isTerminalOut {
		if err := cli.monitorTtySize(createResponse.ID, false); err != nil {
			log.Errorf("Error monitoring TTY size: %s", err)
		}
	}

	if errCh != nil {
		if err := <-errCh; err != nil {
			log.Debugf("Error hijack: %s", err)
			return err
		}
	}

	// Detached mode: wait for the id to be displayed and return.
	if !config.AttachStdout && !config.AttachStderr {
		// Detached mode
		<-waitDisplayId
		return nil
	}

	var status int

	// Attached mode
	if *flAutoRemove {
		// Autoremove: wait for the container to finish, retrieve
		// the exit code and remove the container
		if _, _, err := readBody(cli.call("POST", "/containers/"+createResponse.ID+"/wait", nil, false)); err != nil {
			return err
		}
		if _, status, err = getExitCode(cli, createResponse.ID); err != nil {
			return err
		}
	} else {
		// No Autoremove: Simply retrieve the exit code
		if !config.Tty {
			// In non-TTY mode, we can't detach, so we must wait for container exit
			if status, err = waitForExit(cli, createResponse.ID); err != nil {
				return err
			}
		} else {
			// In TTY mode, there is a race: if the process dies too slowly, the state could
			// be updated after the getExitCode call and result in the wrong exit code being reported
			if _, status, err = getExitCode(cli, createResponse.ID); err != nil {
				return err
			}
		}
	}
	if status != 0 {
		return &utils.StatusError{StatusCode: status}
	}
	return nil
}
// CmdCp copies files/folders from a path inside a container to a host
// directory.  With "-" as HOSTDIR the data is written to stdout as a tar
// stream.
//
// Usage: docker cp CONTAINER:PATH HOSTDIR|-
func (cli *DockerCli) CmdCp(args ...string) error {
	cmd := cli.Subcmd("cp", "CONTAINER:PATH HOSTDIR|-", "Copy files/folders from a PATH on the container to a HOSTDIR on the host\nrunning the command. Use '-' to write the data\nas a tar file to STDOUT.", true)
	cmd.Require(flag.Exact, 2)
	utils.ParseFlags(cmd, args, true)

	var copyData engine.Env
	// NOTE(review): a plain ":" split rejects arguments containing extra
	// colons (e.g. paths with ":") — assumed acceptable here; confirm.
	info := strings.Split(cmd.Arg(0), ":")
	if len(info) != 2 {
		return fmt.Errorf("Error: Path not specified")
	}

	copyData.Set("Resource", info[1])
	copyData.Set("HostPath", cmd.Arg(1))

	stream, statusCode, err := cli.call("POST", "/containers/"+info[0]+"/copy", copyData, false)
	// The stream may be non-nil even when an error/status is returned.
	if stream != nil {
		defer stream.Close()
	}
	if statusCode == 404 {
		return fmt.Errorf("No such container: %v", info[0])
	}
	if err != nil {
		return err
	}

	if statusCode == 200 {
		dest := copyData.Get("HostPath")
		if dest == "-" {
			// Raw tar stream straight to stdout.
			_, err = io.Copy(cli.out, stream)
		} else {
			err = archive.Untar(stream, dest, &archive.TarOptions{NoLchown: true})
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// CmdSave saves one or more images to a tar archive, streamed to stdout by
// default or written to the file given with -o/--output.
//
// Usage: docker save [OPTIONS] IMAGE [IMAGE...]
func (cli *DockerCli) CmdSave(args ...string) error {
	cmd := cli.Subcmd("save", "IMAGE [IMAGE...]", "Save an image(s) to a tar archive (streamed to STDOUT by default)", true)
	outfile := cmd.String([]string{"o", "-output"}, "", "Write to an file, instead of STDOUT")
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	var (
		output io.Writer = cli.out
		err    error
	)
	if *outfile != "" {
		var f *os.File
		f, err = os.Create(*outfile)
		if err != nil {
			return err
		}
		// Fix: the created file was previously never closed (descriptor
		// leak).  Writes to *os.File are unbuffered, so a deferred Close
		// is sufficient here.
		defer f.Close()
		output = f
	} else if cli.isTerminalOut {
		return errors.New("Cowardly refusing to save to a terminal. Use the -o flag or redirect.")
	}

	if len(cmd.Args()) == 1 {
		image := cmd.Arg(0)
		if err := cli.stream("GET", "/images/"+image+"/get", nil, output, nil); err != nil {
			return err
		}
	} else {
		v := url.Values{}
		for _, arg := range cmd.Args() {
			v.Add("names", arg)
		}
		if err := cli.stream("GET", "/images/get?"+v.Encode(), nil, output, nil); err != nil {
			return err
		}
	}
	return nil
}
// CmdLoad loads an image from a tar archive read from stdin, or from the
// file given with -i/--input.
//
// Usage: docker load [OPTIONS]
func (cli *DockerCli) CmdLoad(args ...string) error {
	cmd := cli.Subcmd("load", "", "Load an image from a tar archive on STDIN", true)
	infile := cmd.String([]string{"i", "-input"}, "", "Read from a tar archive file, instead of STDIN")
	cmd.Require(flag.Exact, 0)
	utils.ParseFlags(cmd, args, true)

	var (
		input io.Reader = cli.in
		err   error
	)
	if *infile != "" {
		var f *os.File
		f, err = os.Open(*infile)
		if err != nil {
			return err
		}
		// Fix: the opened file was previously never closed (descriptor leak).
		defer f.Close()
		input = f
	}
	if err := cli.stream("POST", "/images/load", input, cli.out, nil); err != nil {
		return err
	}
	return nil
}
// CmdExec runs a new command inside a running container.
//
// Usage: docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
//
// In detached mode the exec is simply started; otherwise the client's
// stdio is attached over a hijacked HTTP connection and the exec's exit
// code is propagated via utils.StatusError.
func (cli *DockerCli) CmdExec(args ...string) error {
	cmd := cli.Subcmd("exec", "CONTAINER COMMAND [ARG...]", "Run a command in a running container", true)

	execConfig, err := runconfig.ParseExec(cmd, args)
	// just in case the ParseExec does not exit
	// Fix: check err before touching execConfig — the previous order
	// dereferenced execConfig first, which panics if ParseExec returns a
	// nil config together with an error.
	if err != nil || execConfig.Container == "" {
		return &utils.StatusError{StatusCode: 1}
	}

	stream, _, err := cli.call("POST", "/containers/"+execConfig.Container+"/exec", execConfig, false)
	if err != nil {
		return err
	}

	var response types.ContainerExecCreateResponse
	if err := json.NewDecoder(stream).Decode(&response); err != nil {
		return err
	}
	for _, warning := range response.Warnings {
		fmt.Fprintf(cli.err, "WARNING: %s\n", warning)
	}

	execID := response.ID

	if execID == "" {
		fmt.Fprintf(cli.out, "exec ID empty")
		return nil
	}

	if !execConfig.Detach {
		if err := cli.CheckTtyInput(execConfig.AttachStdin, execConfig.Tty); err != nil {
			return err
		}
	} else {
		if _, _, err := readBody(cli.call("POST", "/exec/"+execID+"/start", execConfig, false)); err != nil {
			return err
		}
		// For now don't print this - wait for when we support exec wait()
		// fmt.Fprintf(cli.out, "%s\n", execID)
		return nil
	}

	// Interactive exec requested.
	var (
		out, stderr io.Writer
		in          io.ReadCloser
		hijacked    = make(chan io.Closer)
		errCh       chan error
	)

	// Block the return until the chan gets closed
	defer func() {
		log.Debugf("End of CmdExec(), Waiting for hijack to finish.")
		if _, ok := <-hijacked; ok {
			log.Errorf("Hijack did not finish (chan still open)")
		}
	}()

	if execConfig.AttachStdin {
		in = cli.in
	}
	if execConfig.AttachStdout {
		out = cli.out
	}
	if execConfig.AttachStderr {
		// With a TTY both streams are multiplexed onto stdout.
		if execConfig.Tty {
			stderr = cli.out
		} else {
			stderr = cli.err
		}
	}
	errCh = promise.Go(func() error {
		return cli.hijack("POST", "/exec/"+execID+"/start", execConfig.Tty, in, out, stderr, hijacked, execConfig)
	})

	// Acknowledge the hijack before starting
	select {
	case closer := <-hijacked:
		// Make sure that hijack gets closed when returning. (result
		// in closing hijack chan and freeing server's goroutines.
		if closer != nil {
			defer closer.Close()
		}
	case err := <-errCh:
		if err != nil {
			log.Debugf("Error hijack: %s", err)
			return err
		}
	}

	if execConfig.Tty && cli.isTerminalIn {
		if err := cli.monitorTtySize(execID, true); err != nil {
			log.Errorf("Error monitoring TTY size: %s", err)
		}
	}

	if err := <-errCh; err != nil {
		log.Debugf("Error hijack: %s", err)
		return err
	}

	var status int
	if _, status, err = getExecExitCode(cli, execID); err != nil {
		return err
	}

	if status != 0 {
		return &utils.StatusError{StatusCode: status}
	}

	return nil
}
// containerStats holds the most recent stats sample for one container.
// The float fields and err are written by Collect (which runs in its own
// goroutines) and read by Display/CmdStats; mu guards that shared state.
type containerStats struct {
	Name             string       // container name or ID as given on the command line
	CpuPercentage    float64      // CPU usage since the previous sample, percent
	Memory           float64      // current memory usage, bytes
	MemoryLimit      float64      // memory limit, bytes
	MemoryPercentage float64      // memory usage relative to the limit, percent
	NetworkRx        float64      // bytes received
	NetworkTx        float64      // bytes transmitted
	mu               sync.RWMutex // guards the fields above and err
	err              error        // error from the stats stream, if any
}
// Collect streams /containers/<name>/stats from the daemon and keeps the
// receiver's fields updated until the stream errors out.  If no sample
// arrives within two seconds the displayed values are zeroed.
func (s *containerStats) Collect(cli *DockerCli) {
	stream, _, err := cli.call("GET", "/containers/"+s.Name+"/stats", nil, false)
	if err != nil {
		// Fix: s.err is read by other goroutines under s.mu (Display and
		// CmdStats), so it must also be written under the lock; the
		// original assigned it without locking here.
		s.mu.Lock()
		s.err = err
		s.mu.Unlock()
		return
	}
	defer stream.Close()

	var (
		previousCpu    uint64
		previousSystem uint64
		start          = true
		dec            = json.NewDecoder(stream)
		u              = make(chan error, 1)
	)
	go func() {
		for {
			var v *types.Stats
			if err := dec.Decode(&v); err != nil {
				u <- err
				return
			}
			var (
				memPercent = float64(v.MemoryStats.Usage) / float64(v.MemoryStats.Limit) * 100.0
				cpuPercent = 0.0
			)
			// The first sample has no predecessor, so no CPU delta yet.
			if !start {
				cpuPercent = calculateCpuPercent(previousCpu, previousSystem, v)
			}
			start = false
			s.mu.Lock()
			s.CpuPercentage = cpuPercent
			s.Memory = float64(v.MemoryStats.Usage)
			s.MemoryLimit = float64(v.MemoryStats.Limit)
			s.MemoryPercentage = memPercent
			s.NetworkRx = float64(v.Network.RxBytes)
			s.NetworkTx = float64(v.Network.TxBytes)
			s.mu.Unlock()
			previousCpu = v.CpuStats.CpuUsage.TotalUsage
			previousSystem = v.CpuStats.SystemUsage
			u <- nil
		}
	}()
	for {
		select {
		case <-time.After(2 * time.Second):
			// zero out the values if we have not received an update within
			// the specified duration.
			s.mu.Lock()
			s.CpuPercentage = 0
			s.Memory = 0
			s.MemoryPercentage = 0
			s.mu.Unlock()
		case err := <-u:
			if err != nil {
				s.mu.Lock()
				s.err = err
				s.mu.Unlock()
				return
			}
		}
	}
}
// Display writes one tab-separated row of stats for this container to w,
// matching the CmdStats header columns.  It returns the stream error, if
// any, so the caller can drop dead containers from the table.
func (s *containerStats) Display(w io.Writer) error {
	s.mu.RLock()
	defer s.mu.RUnlock()

	if s.err != nil {
		return s.err
	}

	memUsage := units.BytesSize(s.Memory)
	memLimit := units.BytesSize(s.MemoryLimit)
	netRx := units.BytesSize(s.NetworkRx)
	netTx := units.BytesSize(s.NetworkTx)
	fmt.Fprintf(w, "%s\t%.2f%%\t%s/%s\t%.2f%%\t%s/%s\n",
		s.Name, s.CpuPercentage, memUsage, memLimit, s.MemoryPercentage, netRx, netTx)
	return nil
}
// CmdStats displays a live, continuously refreshed resource-usage table
// for one or more containers.
//
// Usage: docker stats CONTAINER [CONTAINER...]
func (cli *DockerCli) CmdStats(args ...string) error {
	cmd := cli.Subcmd("stats", "CONTAINER [CONTAINER...]", "Display a live stream of one or more containers' resource usage statistics", true)
	cmd.Require(flag.Min, 1)
	utils.ParseFlags(cmd, args, true)

	names := cmd.Args()
	sort.Strings(names)
	var (
		cStats []*containerStats
		w      = tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	)
	// Clear the screen and move the cursor home before every refresh.
	printHeader := func() {
		fmt.Fprint(cli.out, "\033[2J")
		fmt.Fprint(cli.out, "\033[H")
		fmt.Fprintln(w, "CONTAINER\tCPU %\tMEM USAGE/LIMIT\tMEM %\tNET I/O")
	}
	for _, n := range names {
		s := &containerStats{Name: n}
		cStats = append(cStats, s)
		go s.Collect(cli)
	}
	// do a quick pause so that any failed connections for containers that do not exist are able to be
	// evicted before we display the initial or default values.
	time.Sleep(500 * time.Millisecond)
	var errs []string
	for _, c := range cStats {
		c.mu.Lock()
		if c.err != nil {
			errs = append(errs, fmt.Sprintf("%s: %v", c.Name, c.err))
		}
		c.mu.Unlock()
	}
	if len(errs) > 0 {
		return fmt.Errorf("%s", strings.Join(errs, ", "))
	}
	// Refresh twice a second.  Fix: "for range" is the idiomatic form of
	// the previous "for _ = range" when the value is unused.
	for range time.Tick(500 * time.Millisecond) {
		printHeader()
		toRemove := []int{}
		for i, s := range cStats {
			if err := s.Display(w); err != nil {
				toRemove = append(toRemove, i)
			}
		}
		// Delete from the end so earlier indices stay valid.
		for j := len(toRemove) - 1; j >= 0; j-- {
			i := toRemove[j]
			cStats = append(cStats[:i], cStats[i+1:]...)
		}
		if len(cStats) == 0 {
			return nil
		}
		w.Flush()
	}
	return nil
}
func calculateCpuPercent(previousCpu, previousSystem uint64, v *types.Stats) float64 {
var (
cpuPercent = 0.0
// calculate the change for the cpu usage of the container in between readings
cpuDelta = float64(v.CpuStats.CpuUsage.TotalUsage - previousCpu)
// calculate the change for the entire system between readings
systemDelta = float64(v.CpuStats.SystemUsage - previousSystem)
)
if systemDelta > 0.0 && cpuDelta > 0.0 {
cpuPercent = (cpuDelta / systemDelta) * float64(len(v.CpuStats.CpuUsage.PercpuUsage)) * 100.0
}
return cpuPercent
}<|fim▁end|> | |
# Copyright 2015-2021 Florian Bruhin (The Compiler) <[email protected]>
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.msgbox."""
import pytest

from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QMessageBox, QWidget
from qutebrowser.misc import msgbox
from qutebrowser.utils import utils
@pytest.fixture(autouse=True)
def patch_args(fake_args):
    """Disable no_err_windows for every test in this module.

    test_no_err_windows re-enables it explicitly to check the opposite
    behaviour.
    """
    fake_args.no_err_windows = False
def test_attributes(qtbot):
    """The basic QMessageBox attributes should match what was passed in."""
    parent = QWidget()
    qtbot.add_widget(parent)

    box = msgbox.msgbox(parent=parent, title='title', text='text',
                        icon=QMessageBox.Critical,
                        buttons=QMessageBox.Ok | QMessageBox.Cancel)
    qtbot.add_widget(box)

    # The window title is not reliable on macOS.
    if not utils.is_mac:
        assert box.windowTitle() == 'title'
    assert box.icon() == QMessageBox.Critical
    assert box.standardButtons() == (QMessageBox.Ok | QMessageBox.Cancel)
    assert box.text() == 'text'
    assert box.parent() is parent
@pytest.mark.parametrize('plain_text, expected', [
    (True, Qt.PlainText),
    (False, Qt.RichText),
    (None, Qt.AutoText),
])
def test_plain_text(qtbot, plain_text, expected):
    """The plain_text argument should map onto the box's text format."""
    kwargs = {
        'parent': None,
        'title': 'foo',
        'text': 'foo',
        'icon': QMessageBox.Information,
        'plain_text': plain_text,
    }
    box = msgbox.msgbox(**kwargs)
    qtbot.add_widget(box)
    assert box.textFormat() == expected
def test_finished_signal(qtbot):
    """Make sure we can pass a slot to be called when the dialog finished."""
    calls = []

    box = msgbox.msgbox(parent=None, title='foo', text='foo',
                        icon=QMessageBox.Information,
                        on_finished=lambda: calls.append(True))
    qtbot.add_widget(box)

    with qtbot.waitSignal(box.finished):
        box.accept()

    # The on_finished slot must have run exactly because the dialog closed.
    assert calls
def test_information(qtbot):
    """msgbox.information should preset title, text and the info icon."""
    box = msgbox.information(parent=None, title='foo', text='bar')
    qtbot.add_widget(box)

    assert box.icon() == QMessageBox.Information
    assert box.text() == 'bar'
    # The window title is not reliable on macOS.
    if not utils.is_mac:
        assert box.windowTitle() == 'foo'
def test_no_err_windows(fake_args, capsys):
    """With no_err_windows set, the message goes to stderr instead.

    Fix: the final assertion carried trailing dataset debris
    ("<|fim▁end|>import pytest") fused onto the line; restored to a clean
    statement.
    """
    fake_args.no_err_windows = True
    box = msgbox.information(parent=None, title='foo', text='bar')
    box.exec()  # should do nothing
    out, err = capsys.readouterr()
    assert not out
    assert err == 'Message box: foo; bar\n'
|
/*
* IronJacamar, a Java EE Connector Architecture implementation
* Copyright 2008, Red Hat Inc, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.jca.common.metadata.ds;
import org.jboss.jca.common.api.metadata.ds.DataSource;
import org.jboss.jca.common.api.metadata.ds.DataSources;
import org.jboss.jca.common.api.metadata.ds.Driver;
import org.jboss.jca.common.api.metadata.ds.XaDataSource;
import org.jboss.jca.common.api.validator.ValidateException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* A DatasourcesImpl.
*
* @author <a href="[email protected]">Stefano Maestri</a>
*
*/
public class DatasourcesImpl implements DataSources
{
/** The serialVersionUID */
private static final long serialVersionUID = 6933310057105771370L;
private final List<DataSource> datasource;
private final List<XaDataSource> xaDataSource;
private final Map<String, Driver> drivers;
/**
* Create a new DatasourcesImpl.
*
* @param datasource datasource
* @param xaDataSource xaDataSource
* @param drivers drivers
* @throws ValidateException ValidateException
*/
public DatasourcesImpl(List<DataSource> datasource,
List<XaDataSource> xaDataSource,
Map<String, Driver> drivers)
throws ValidateException
{
super();
if (datasource != null)
{
this.datasource = new ArrayList<DataSource>(datasource.size());
this.datasource.addAll(datasource);
}
else
{
this.datasource = new ArrayList<DataSource>(0);
}
if (xaDataSource != null)
{
this.xaDataSource = new ArrayList<XaDataSource>(xaDataSource.size());
this.xaDataSource.addAll(xaDataSource);
}
else
{
this.xaDataSource = new ArrayList<XaDataSource>(0);
}
if (drivers != null)
{
this.drivers = new HashMap<String, Driver>(drivers.size());
this.drivers.putAll(drivers);
}
else
{
this.drivers = new HashMap<String, Driver>(0);
}
this.validate();
}
/**
* Get the datasource.
*
* @return the datasource.
*/
@Override
public final List<DataSource> getDataSource()
{
return Collections.unmodifiableList(datasource);
}
/**
* Get the xaDataSource.
*
* @return the xaDataSource.
*/
@Override
public final List<XaDataSource> getXaDataSource()
{
return Collections.unmodifiableList(xaDataSource);
}
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + ((datasource == null) ? 0 : datasource.hashCode());
result = prime * result + ((xaDataSource == null) ? 0 : xaDataSource.hashCode());
result = prime * result + ((drivers == null) ? 0 : drivers.hashCode());
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
if (!(obj instanceof DatasourcesImpl))
return false;
DatasourcesImpl other = (DatasourcesImpl) obj;
if (datasource == null)
{
if (other.datasource != null)
return false;
}
else if (!datasource.equals(other.datasource))
return false;
if (xaDataSource == null)
{
if (other.xaDataSource != null)
return false;
}
else if (!xaDataSource.equals(other.xaDataSource))
return false;
if (drivers == null)
{
if (other.drivers != null)
return false;
}
else if (!drivers.equals(other.drivers))
return false;
return true;
}
@Override
public String toString()
{
StringBuilder sb = new StringBuilder();
sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
sb.append("<datasources>");
if (datasource != null && datasource.size() > 0)
{
for (DataSource ds : datasource)
{
sb.append(ds);
}
}
if (xaDataSource != null && xaDataSource.size() > 0)
{
for (XaDataSource xads : xaDataSource)
{
sb.append(xads);
}
}
if (drivers != null && drivers.size() > 0)
{
sb.append("<").append(DataSources.Tag.DRIVERS).append(">");
for (Driver d : drivers.values())
{
sb.append(d);
}
sb.append("</").append(DataSources.Tag.DRIVERS).append(">");
}
sb.append("</datasources>");
return sb.toString();<|fim▁hole|> public void validate() throws ValidateException
{
//always validate if all content is validating
for (DataSource ds : this.datasource)
{
ds.validate();
}
for (XaDataSource xads : this.xaDataSource)
{
xads.validate();
}
}
@Override
public Driver getDriver(String name)
{
return drivers.get(name);
}
@Override
public List<Driver> getDrivers()
{
return Collections.unmodifiableList(new ArrayList<Driver>(drivers.values()));
}
}<|fim▁end|> | }
@Override |
odoo.define('website_forum.tour_forum', function (require) {
'use strict';

var core = require('web.core');
var Tour = require('web.Tour');

var _t = core._t;

// Interactive tutorial for the forum module: create a forum, post a
// question, answer it and accept the answer.
// Fix: stripped embedded dataset markers and restored the first step's
// missing popover line.
Tour.register({
    id: 'question',
    name: _t("Create a question"),
    steps: [
        {
            title: _t("Create a Question!"),
            content: _t("Let's go through the first steps to create a new question."),
            popover: { next: _t("Start Tutorial"), end: _t("Skip It") },
        },
        {
            title: _t("Add Content"),
            element: '#oe_main_menu_navbar a[data-action=new_page]',
            placement: 'bottom',
            content: _t("Use this button to create a new forum like any other document (page, menu, products, event, ...)."),
            popover: { fixed: true },
        },
        {
            title: _t("New Forum"),
            element: 'a[data-action=new_forum]',
            placement: 'left',
            content: _t("Select this menu item to create a new forum."),
            popover: { fixed: true },
        },
        {
            title: _t("Forum Name"),
            element: '.modal #editor_new_forum input[type=text]',
            sampleText: 'New Forum',
            placement: 'right',
            content: _t("Enter a name for your new forum."),
        },
        {
            title: _t("Create Forum"),
            waitNot: ".modal #editor_new_forum input[type=text]:propValue('')",
            element: '.modal button.btn-primary',
            placement: 'right',
            content: _t("Click <em>Continue</em> to create the forum."),
        },
        {
            title: _t("New Forum Created"),
            waitNot: '.modal:visible',
            content: _t("This page contains all the information related to the new forum."),
            popover: { next: _t("Continue") },
        },
        {
            title: _t("Ask a Question"),
            element: '.btn-block a:first',
            placement: 'left',
            content: _t("Ask the question in this forum by clicking on the button."),
        },
        {
            title: _t("Question Title"),
            element: 'input[name=post_name]',
            sampleText: 'First Question Title',
            placement: 'top',
            content: _t("Give your question title."),
        },
        {
            title: _t("Question"),
            waitNot: "input[name=post_name]:propValue('')",
            element: '.note-editable p',
            sampleText: 'First Question',
            placement: 'top',
            content: _t("Put your question here."),
        },
        {
            title: _t("Give Tag"),
            waitNot: '.note-editable p:containsExact("<br>")',
            element: '.select2-choices',
            placement: 'top',
            content: _t("Insert tags related to your question."),
        },
        {
            title: _t("Post Question"),
            waitNot: "input[id=s2id_autogen2]:propValue('Tags')",
            element: 'button:contains("Post Your Question")',
            placement: 'bottom',
            content: _t("Click to post your question."),
        },
        {
            title: _t("New Question Created"),
            waitFor: '.fa-star',
            content: _t("This page contains the newly created questions."),
            popover: { next: _t("Continue") },
        },
        {
            title: _t("Answer"),
            element: '.note-editable p',
            sampleText: 'First Answer',
            placement: 'top',
            content: _t("Put your answer here."),
        },
        {
            title: _t("Post Answer"),
            waitNot: '.note-editable p:containsExact("<br>")',
            element: 'button:contains("Post Answer")',
            placement: 'bottom',
            content: _t("Click to post your answer."),
        },
        {
            title: _t("Answer Posted"),
            waitFor: '.fa-check-circle',
            content: _t("This page contains the newly created questions and its answers."),
            popover: { next: _t("Continue") },
        },
        {
            title: _t("Accept Answer"),
            element: 'a[data-karma="20"]:first',
            placement: 'right',
            content: _t("Click here to accept this answer."),
        },
        {
            title: _t("Congratulations"),
            waitFor: '.oe_answer_true',
            content: _t("Congratulations! You just created and post your first question and answer."),
            popover: { next: _t("Close Tutorial") },
        },
    ]
});

});
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.