/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Servo's experimental layout system builds a tree of `Flow` and `Fragment` objects and solves
//! layout constraints to obtain positions and display attributes of tree nodes. Positions are
//! computed in several tree traversals driven by the fundamental data dependencies required by
//! inline and block layout.
//!
//! Flows are interior nodes in the layout tree and correspond closely to *flow contexts* in the
//! CSS specification. Flows are responsible for positioning their child flow contexts and
//! fragments. Flows have purpose-specific fields, such as auxiliary line structs, out-of-flow
//! child lists, and so on.
//!
//! Currently, the important types of flows are:
//!
//! * `BlockFlow`: A flow that establishes a block context. It has several child flows, each of
//! which are positioned according to block formatting context rules (CSS block boxes). Block
//! flows also contain a single box to represent their rendered borders, padding, etc.
//! The BlockFlow at the root of the tree has special behavior: it stretches to the boundaries of
//! the viewport.
//!
//! * `InlineFlow`: A flow that establishes an inline context. It has a flat list of child
//! fragments/flows that are subject to inline layout and line breaking and structs to represent
//! line breaks and mapping to CSS boxes, for the purpose of handling `getClientRects()` and
//! similar methods.
use app_units::Au;
use crate::block::{BlockFlow, FormattingContextType};
use crate::context::LayoutContext;
use crate::display_list::items::ClippingAndScrolling;
use crate::display_list::{DisplayListBuildState, StackingContextCollectionState};
use crate::flex::FlexFlow;
use crate::floats::{Floats, SpeculatedFloatPlacement};
use crate::flow_list::{FlowList, FlowListIterator, MutFlowListIterator};
use crate::flow_ref::{FlowRef, WeakFlowRef};
use crate::fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, Overflow};
use crate::inline::InlineFlow;
use crate::model::{CollapsibleMargins, IntrinsicISizes, MarginCollapseInfo};
use crate::multicol::MulticolFlow;
use crate::parallel::FlowParallelInfo;
use crate::table::TableFlow;
use crate::table_caption::TableCaptionFlow;
use crate::table_cell::TableCellFlow;
use crate::table_colgroup::TableColGroupFlow;
use crate::table_row::TableRowFlow;
use crate::table_rowgroup::TableRowGroupFlow;
use crate::table_wrapper::TableWrapperFlow;
use euclid::{Point2D, Rect, Size2D, Vector2D};
use gfx_traits::print_tree::PrintTree;
use gfx_traits::StackingContextId;
use num_traits::cast::FromPrimitive;
use serde::ser::{Serialize, SerializeStruct, Serializer};
use servo_geometry::{au_rect_to_f32_rect, f32_rect_to_au_rect, MaxRect};
use std::fmt;
use std::iter::Zip;
use std::slice::IterMut;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use style::computed_values::clear::T as Clear;
use style::computed_values::float::T as Float;
use style::computed_values::overflow_x::T as StyleOverflow;
use style::computed_values::position::T as Position;
use style::computed_values::text_align::T as TextAlign;
use style::context::SharedStyleContext;
use style::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
use style::properties::ComputedValues;
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::values::computed::LengthOrPercentageOrAuto;
use webrender_api::LayoutTransform;
/// This marker trait indicates that a type is a struct with `#[repr(C)]` whose first field
/// is of type `BaseFlow` or some type that also implements this trait.
///
/// In other words, the memory representation of `BaseFlow` must be a prefix
/// of the memory representation of types implementing `HasBaseFlow`.
///
/// # Safety
///
/// Implementing this trait for a type whose layout does not begin with a
/// `BaseFlow` is undefined behavior: `GetBaseFlow` reinterprets a pointer to
/// the implementing type as a pointer to `BaseFlow`.
#[allow(unsafe_code)]
pub unsafe trait HasBaseFlow {}
/// Methods to get the `BaseFlow` from any `HasBaseFlow` type.
pub trait GetBaseFlow {
    /// Returns a shared reference to the `BaseFlow` prefix of this flow.
    fn base(&self) -> &BaseFlow;
    /// Returns a mutable reference to the `BaseFlow` prefix of this flow.
    fn mut_base(&mut self) -> &mut BaseFlow;
}
impl<T: HasBaseFlow + ?Sized> GetBaseFlow for T {
    #[inline(always)]
    #[allow(unsafe_code)]
    fn base(&self) -> &BaseFlow {
        let ptr: *const Self = self;
        // Per the `HasBaseFlow` contract, `Self` is `#[repr(C)]` with a
        // `BaseFlow` (or `HasBaseFlow` type) as its first field, so a pointer
        // to the whole struct is also a valid pointer to its leading
        // `BaseFlow`.
        let ptr = ptr as *const BaseFlow;
        // SAFETY: cast justified above; the resulting reference borrows from
        // `self`, so its lifetime is correctly bounded.
        unsafe { &*ptr }
    }
    #[inline(always)]
    #[allow(unsafe_code)]
    fn mut_base(&mut self) -> &mut BaseFlow {
        let ptr: *mut Self = self;
        // Same layout argument as in `base()` above, for the mutable case.
        let ptr = ptr as *mut BaseFlow;
        // SAFETY: cast justified above; exclusivity follows from `&mut self`.
        unsafe { &mut *ptr }
    }
}
/// Virtual methods that make up a float context.
///
/// Note that virtual methods have a cost; we should not overuse them in Servo. Consider adding
/// methods to `ImmutableFlowUtils` or `MutableFlowUtils` before adding more methods here.
pub trait Flow: HasBaseFlow + fmt::Debug + Sync + Send + 'static {
    // RTTI
    //
    // TODO(pcwalton): Use Rust's RTTI, once that works.

    /// Returns the class of flow that this is.
    fn class(&self) -> FlowClass;

    /// If this is a block flow, returns the underlying object. Fails otherwise.
    fn as_block(&self) -> &BlockFlow {
        panic!("called as_block() on a non-block flow")
    }

    /// If this is a block flow, returns the underlying object, borrowed mutably. Fails otherwise.
    fn as_mut_block(&mut self) -> &mut BlockFlow {
        debug!("called as_mut_block() on a flow of type {:?}", self.class());
        panic!("called as_mut_block() on a non-block flow")
    }

    /// If this is a flex flow, returns the underlying object. Fails otherwise.
    fn as_flex(&self) -> &FlexFlow {
        panic!("called as_flex() on a non-flex flow")
    }

    /// If this is a flex flow, returns the underlying object, borrowed mutably. Fails otherwise.
    fn as_mut_flex(&mut self) -> &mut FlexFlow {
        panic!("called as_mut_flex() on a non-flex flow")
    }

    /// If this is an inline flow, returns the underlying object. Fails otherwise.
    fn as_inline(&self) -> &InlineFlow {
        panic!("called as_inline() on a non-inline flow")
    }

    /// If this is an inline flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_inline(&mut self) -> &mut InlineFlow {
        panic!("called as_mut_inline() on a non-inline flow")
    }

    /// If this is a table wrapper flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_wrapper(&mut self) -> &mut TableWrapperFlow {
        panic!("called as_mut_table_wrapper() on a non-tablewrapper flow")
    }

    /// If this is a table wrapper flow, returns the underlying object. Fails otherwise.
    fn as_table_wrapper(&self) -> &TableWrapperFlow {
        panic!("called as_table_wrapper() on a non-tablewrapper flow")
    }

    /// If this is a table flow, returns the underlying object, borrowed mutably. Fails otherwise.
    fn as_mut_table(&mut self) -> &mut TableFlow {
        panic!("called as_mut_table() on a non-table flow")
    }

    /// If this is a table flow, returns the underlying object. Fails otherwise.
    fn as_table(&self) -> &TableFlow {
        panic!("called as_table() on a non-table flow")
    }

    /// If this is a table colgroup flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_colgroup(&mut self) -> &mut TableColGroupFlow {
        panic!("called as_mut_table_colgroup() on a non-tablecolgroup flow")
    }

    /// If this is a table colgroup flow, returns the underlying object. Fails
    /// otherwise.
    fn as_table_colgroup(&self) -> &TableColGroupFlow {
        panic!("called as_table_colgroup() on a non-tablecolgroup flow")
    }

    /// If this is a table rowgroup flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_rowgroup(&mut self) -> &mut TableRowGroupFlow {
        panic!("called as_mut_table_rowgroup() on a non-tablerowgroup flow")
    }

    /// If this is a table rowgroup flow, returns the underlying object. Fails otherwise.
    fn as_table_rowgroup(&self) -> &TableRowGroupFlow {
        panic!("called as_table_rowgroup() on a non-tablerowgroup flow")
    }

    /// If this is a table row flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_row(&mut self) -> &mut TableRowFlow {
        panic!("called as_mut_table_row() on a non-tablerow flow")
    }

    /// If this is a table row flow, returns the underlying object. Fails otherwise.
    fn as_table_row(&self) -> &TableRowFlow {
        panic!("called as_table_row() on a non-tablerow flow")
    }

    /// If this is a table caption flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_caption(&mut self) -> &mut TableCaptionFlow {
        panic!("called as_mut_table_caption() on a non-tablecaption flow")
    }

    /// If this is a table cell flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_table_cell(&mut self) -> &mut TableCellFlow {
        panic!("called as_mut_table_cell() on a non-tablecell flow")
    }

    /// If this is a multicol flow, returns the underlying object, borrowed mutably. Fails
    /// otherwise.
    fn as_mut_multicol(&mut self) -> &mut MulticolFlow {
        panic!("called as_mut_multicol() on a non-multicol flow")
    }

    /// If this is a table cell flow, returns the underlying object. Fails otherwise.
    fn as_table_cell(&self) -> &TableCellFlow {
        panic!("called as_table_cell() on a non-tablecell flow")
    }

    // Main methods

    /// Pass 1 of reflow: computes minimum and preferred inline-sizes.
    ///
    /// Recursively (bottom-up) determine the flow's minimum and preferred inline-sizes. When
    /// called on this flow, all child flows have had their minimum and preferred inline-sizes set.
    /// This function must decide minimum/preferred inline-sizes based on its children's inline-
    /// sizes and the dimensions of any boxes it is responsible for flowing.
    fn bubble_inline_sizes(&mut self) {
        panic!("bubble_inline_sizes not yet implemented")
    }

    /// Pass 2 of reflow: computes inline-size.
    fn assign_inline_sizes(&mut self, _ctx: &LayoutContext) {
        panic!("assign_inline_sizes not yet implemented")
    }

    /// Pass 3a of reflow: computes block-size.
    fn assign_block_size(&mut self, _ctx: &LayoutContext) {
        panic!("assign_block_size not yet implemented")
    }

    /// Like `assign_block_size`, but it recurses explicitly into descendants.
    /// Fit as much content as possible within `available_block_size`.
    /// If that’s not all of it, truncate the contents of `self`
    /// and return a new flow similar to `self` with the rest of the content.
    ///
    /// The default is to make a flow "atomic": it can not be fragmented.
    fn fragment(
        &mut self,
        layout_context: &LayoutContext,
        _fragmentation_context: Option<FragmentationContext>,
    ) -> Option<Arc<dyn Flow>> {
        // Bottom-up traversal: children get their block sizes before `flow`
        // itself, matching the ordering assumed by `assign_block_size`.
        fn recursive_assign_block_size<F: ?Sized + Flow + GetBaseFlow>(
            flow: &mut F,
            ctx: &LayoutContext,
        ) {
            for child in flow.mut_base().child_iter_mut() {
                recursive_assign_block_size(child, ctx)
            }
            flow.assign_block_size(ctx);
        }
        recursive_assign_block_size(self, layout_context);
        // `None`: the atomic default never produces a continuation flow.
        None
    }

    /// Collects stacking-context and clip/scroll state for this flow subtree into `state`,
    /// as part of display list construction.
    fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState);

    /// If this is a float, places it. The default implementation does nothing.
    fn place_float_if_applicable<'a>(&mut self) {}

    /// Assigns block-sizes in-order; or, if this is a float, places the float. The default
    /// implementation simply assigns block-sizes if this flow might have floats in. Returns true
    /// if it was determined that this child might have had floats in or false otherwise.
    ///
    /// `parent_thread_id` is the thread ID of the parent. This is used for the layout tinting
    /// debug mode; if the block size of this flow was determined by its parent, we should treat
    /// it as laid out by its parent.
    fn assign_block_size_for_inorder_child_if_necessary(
        &mut self,
        layout_context: &LayoutContext,
        parent_thread_id: u8,
        _content_box: LogicalRect<Au>,
    ) -> bool {
        let might_have_floats_in_or_out =
            self.base().might_have_floats_in() || self.base().might_have_floats_out();
        if might_have_floats_in_or_out {
            self.mut_base().thread_id = parent_thread_id;
            self.assign_block_size(layout_context);
            // Block size has just been recomputed, so the corresponding damage
            // flags are now satisfied for this flow.
            self.mut_base()
                .restyle_damage
                .remove(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
        }
        might_have_floats_in_or_out
    }

    /// Returns this flow's overflow, translated into the coordinate space of its parent.
    /// For block-like flows this accounts for `overflow-x`/`overflow-y` clipping and for any
    /// 2D transform established by the fragment.
    fn get_overflow_in_parent_coordinates(&self) -> Overflow {
        // FIXME(#2795): Get the real container size.
        let container_size = Size2D::zero();
        let position = self
            .base()
            .position
            .to_physical(self.base().writing_mode, container_size);
        let mut overflow = self.base().overflow;
        match self.class() {
            FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {},
            // Other flow classes have no fragment-level clipping/transform to
            // apply; just move the overflow into the parent's space.
            _ => {
                overflow.translate(&position.origin.to_vector());
                return overflow;
            },
        }
        let border_box = self.as_block().fragment.stacking_relative_border_box(
            &self.base().stacking_relative_position,
            &self
                .base()
                .early_absolute_position_info
                .relative_containing_block_size,
            self.base()
                .early_absolute_position_info
                .relative_containing_block_mode,
            CoordinateSystem::Own,
        );
        // Non-`visible` overflow clips the overflow rect to the border box on
        // that axis.
        if StyleOverflow::Visible != self.as_block().fragment.style.get_box().overflow_x {
            overflow.paint.origin.x = Au(0);
            overflow.paint.size.width = border_box.size.width;
            overflow.scroll.origin.x = Au(0);
            overflow.scroll.size.width = border_box.size.width;
        }
        if StyleOverflow::Visible != self.as_block().fragment.style.get_box().overflow_y {
            overflow.paint.origin.y = Au(0);
            overflow.paint.size.height = border_box.size.height;
            overflow.scroll.origin.y = Au(0);
            overflow.scroll.size.height = border_box.size.height;
        }
        // No stacking context or no transform list: nothing further to apply.
        if !self.as_block().fragment.establishes_stacking_context() ||
            self.as_block()
                .fragment
                .style
                .get_box()
                .transform
                .0
                .is_empty()
        {
            overflow.translate(&position.origin.to_vector());
            return overflow;
        }
        // TODO: Take into account 3d transforms, even though it's a fairly
        // uncommon case.
        let transform_2d = self
            .as_block()
            .fragment
            .transform_matrix(&position)
            .unwrap_or(LayoutTransform::identity())
            .to_2d()
            .to_untyped();
        let transformed_overflow = Overflow {
            paint: f32_rect_to_au_rect(
                transform_2d.transform_rect(&au_rect_to_f32_rect(overflow.paint)),
            ),
            scroll: f32_rect_to_au_rect(
                transform_2d.transform_rect(&au_rect_to_f32_rect(overflow.scroll)),
            ),
        };
        // TODO: We are taking the union of the overflow and transformed overflow here, which
        // happened implicitly in the previous version of this code. This will probably be
        // unnecessary once we are taking into account 3D transformations above.
        overflow.union(&transformed_overflow);
        overflow.translate(&position.origin.to_vector());
        overflow
    }

    /// Computes and stores this flow's overflow (CSS 2.1 § 11.1): the union of its own
    /// fragments' overflow with the overflow of the children for which this flow is the
    /// containing block.
    ///
    /// FIXME(pcwalton): This should not be a virtual method, but currently is due to a compiler
    /// bug ("the trait `Sized` is not implemented for `self`").
    ///
    /// Assumption: This is called in a bottom-up traversal, so kids' overflows have
    /// already been set.
    /// Assumption: Absolute descendants have had their overflow calculated.
    fn store_overflow(&mut self, _: &LayoutContext) {
        // Calculate overflow on a per-fragment basis.
        let mut overflow = self.compute_overflow();
        match self.class() {
            FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {
                for kid in self.mut_base().children.iter_mut() {
                    overflow.union(&kid.get_overflow_in_parent_coordinates());
                }
            },
            _ => {},
        }
        self.mut_base().overflow = overflow
    }

    /// Phase 4 of reflow: Compute the stacking-relative position (origin of the content box,
    /// in coordinates relative to the nearest ancestor stacking context).
    fn compute_stacking_relative_position(&mut self, _: &LayoutContext) {
        // The default implementation is a no-op.
    }

    /// Phase 5 of reflow: builds display lists.
    fn build_display_list(&mut self, state: &mut DisplayListBuildState);

    /// Returns the union of all overflow rects of all of this flow's fragments.
    fn compute_overflow(&self) -> Overflow;

    /// Iterates through border boxes of all of this flow's fragments.
    /// Level provides a zero based index indicating the current
    /// depth of the flow tree during fragment iteration.
    fn iterate_through_fragment_border_boxes(
        &self,
        iterator: &mut dyn FragmentBorderBoxIterator,
        level: i32,
        stacking_context_position: &Point2D<Au>,
    );

    /// Mutably iterates through fragments in this flow.
    fn mutate_fragments(&mut self, mutator: &mut dyn FnMut(&mut Fragment));

    /// Gives this flow an opportunity to participate in collapsing of its block-start margin
    /// via `_margin_collapse_info`.
    fn compute_collapsible_block_start_margin(
        &mut self,
        _layout_context: &mut LayoutContext,
        _margin_collapse_info: &mut MarginCollapseInfo,
    ) {
        // The default implementation is a no-op.
    }

    /// Marks this flow as the root flow. The default implementation is a no-op.
    fn mark_as_root(&mut self) {
        debug!("called mark_as_root() on a flow of type {:?}", self.class());
        panic!("called mark_as_root() on an unhandled flow");
    }

    // Note that the following functions are mostly called using static method
    // dispatch, so it's ok to have them in this trait. Plus, they have
    // different behaviour for different types of Flow, so they can't go into
    // the Immutable / Mutable Flow Utils traits without additional casts.

    /// Returns true if this is the root flow. The default is false.
    fn is_root(&self) -> bool {
        false
    }

    /// The 'position' property of this flow.
    fn positioning(&self) -> Position {
        Position::Static
    }

    /// Return true if this flow has position 'fixed'.
    fn is_fixed(&self) -> bool {
        self.positioning() == Position::Fixed
    }

    /// Returns true if this flow contains relatively-positioned fragments or is itself
    /// absolutely positioned.
    fn contains_positioned_fragments(&self) -> bool {
        self.contains_relatively_positioned_fragments() || self
            .base()
            .flags
            .contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
    }

    /// Returns true if this flow's `position` is `relative`.
    fn contains_relatively_positioned_fragments(&self) -> bool {
        self.positioning() == Position::Relative
    }

    /// Returns true if this is an absolute containing block.
    fn is_absolute_containing_block(&self) -> bool {
        self.contains_positioned_fragments()
    }

    /// Returns true if this flow contains fragments that are roots of an absolute flow tree.
    fn contains_roots_of_absolute_flow_tree(&self) -> bool {
        self.contains_relatively_positioned_fragments() || self.is_root()
    }

    /// Updates the inline position of a child flow during the assign-height traversal. At present,
    /// this is only used for absolutely-positioned inline-blocks.
    fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au);

    /// Updates the block position of a child flow during the assign-height traversal. At present,
    /// this is only used for absolutely-positioned inline-blocks.
    fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au);

    /// Return the size of the containing block generated by this flow for the absolutely-
    /// positioned descendant referenced by `for_flow`. For block flows, this is the padding box.
    ///
    /// NB: Do not change this `&self` to `&mut self` under any circumstances! It has security
    /// implications because this can be called on parents concurrently from descendants!
    fn generated_containing_block_size(&self, _: OpaqueFlow) -> LogicalSize<Au>;

    /// Attempts to perform incremental fixup of this flow by replacing its fragment's style with
    /// the new style. This can only succeed if the flow has exactly one fragment.
    fn repair_style(&mut self, new_style: &crate::ServoArc<ComputedValues>);

    /// Print any extra children (such as fragments) contained in this Flow
    /// for debugging purposes. Any items inserted into the tree will become
    /// children of this flow.
    fn print_extra_flow_children(&self, _: &mut PrintTree) {}

    /// Returns the clipping and scrolling info assigned to this flow during display-list
    /// construction. Panics if it has not yet been assigned.
    fn clipping_and_scrolling(&self) -> ClippingAndScrolling {
        match self.base().clipping_and_scrolling {
            Some(info) => info,
            None => unreachable!("Tried to access scroll root id on Flow before assignment"),
        }
    }
}
/// Read-only convenience helpers available on all flows.
pub trait ImmutableFlowUtils {
    // Convenience functions

    /// Returns true if this flow is a block flow or subclass thereof.
    fn is_block_like(self) -> bool;
    /// Returns true if this flow is a table flow.
    fn is_table(self) -> bool;
    /// Returns true if this flow is a table caption flow.
    fn is_table_caption(self) -> bool;
    /// Returns true if this flow is a proper table child.
    fn is_proper_table_child(self) -> bool;
    /// Returns true if this flow is a table row flow.
    fn is_table_row(self) -> bool;
    /// Returns true if this flow is a table cell flow.
    fn is_table_cell(self) -> bool;
    /// Returns true if this flow is a table colgroup flow.
    fn is_table_colgroup(self) -> bool;
    /// Returns true if this flow is a table rowgroup flow.
    fn is_table_rowgroup(self) -> bool;
    /// Returns true if this flow is one of table-related flows.
    fn is_table_kind(self) -> bool;
    /// Returns true if this flow has no children.
    fn is_leaf(self) -> bool;
    /// Returns the number of children that this flow possesses.
    fn child_count(self) -> usize;
    /// Return true if this flow is a Block Container.
    fn is_block_container(self) -> bool;
    /// Returns true if this flow is a block flow.
    fn is_block_flow(self) -> bool;
    /// Returns true if this flow is an inline flow.
    fn is_inline_flow(self) -> bool;
    /// Dumps the flow tree for debugging.
    fn print(self, title: String);
    /// Dumps the flow tree for debugging into the given PrintTree.
    fn print_with_tree(self, print_tree: &mut PrintTree);
    /// Returns true if floats might flow through this flow, as determined by the float placement
    /// speculation pass.
    fn floats_might_flow_through(self) -> bool;
    /// Returns the baseline offset of the last line box in this flow, if any.
    fn baseline_offset_of_last_line_box_in_flow(self) -> Option<Au>;
}
/// Mutating convenience helpers available on all flows.
pub trait MutableFlowUtils {
    /// Calls `repair_style` and `bubble_inline_sizes`. You should use this method instead of
    /// calling them individually, since there is no reason not to perform both operations.
    fn repair_style_and_bubble_inline_sizes(self, style: &crate::ServoArc<ComputedValues>);
}
/// Helpers that require owned access to a flow (e.g. via `FlowRef`).
pub trait MutableOwnedFlowUtils {
    /// Set absolute descendants for this flow.
    ///
    /// Set this flow as the Containing Block for all the absolute descendants.
    fn set_absolute_descendants(&mut self, abs_descendants: AbsoluteDescendants);

    /// Sets the flow as the containing block for all absolute descendants that have been marked
    /// as having reached their containing block. This is needed in order to handle cases like:
    ///
    /// ```html
    /// <div>
    /// <span style="position: relative">
    /// <span style="position: absolute; ..."></span>
    /// </span>
    /// </div>
    /// ```
    fn take_applicable_absolute_descendants(
        &mut self,
        absolute_descendants: &mut AbsoluteDescendants,
    );
}
/// The class of a flow, used for runtime type discrimination (see the `as_*`
/// methods on `Flow`).
#[derive(Clone, Copy, Debug, PartialEq, Serialize)]
pub enum FlowClass {
    Block,
    Inline,
    ListItem,
    TableWrapper,
    Table,
    TableColGroup,
    TableRowGroup,
    TableRow,
    TableCaption,
    TableCell,
    Multicol,
    // A single column within a `Multicol` flow.
    MulticolColumn,
    Flex,
}
impl FlowClass {
    /// Whether flows of this class are laid out as blocks (block flows and the
    /// various table- and flex-related flows), as opposed to inline content.
    fn is_block_like(self) -> bool {
        matches!(
            self,
            FlowClass::Block |
                FlowClass::ListItem |
                FlowClass::Table |
                FlowClass::TableRowGroup |
                FlowClass::TableRow |
                FlowClass::TableCaption |
                FlowClass::TableCell |
                FlowClass::TableWrapper |
                FlowClass::Flex
        )
    }
}
bitflags! {
    #[doc = "Flags used in flows."]
    pub struct FlowFlags: u32 {
        // Bits 0 through 5 are currently unused; the lowest defined flag
        // starts at bit 6.
        #[doc = "Whether this flow is absolutely positioned. This is checked all over layout, so a"]
        #[doc = "virtual call is too expensive."]
        const IS_ABSOLUTELY_POSITIONED = 0b0000_0000_0000_0000_0100_0000;
        #[doc = "Whether this flow clears to the left. This is checked all over layout, so a"]
        #[doc = "virtual call is too expensive."]
        const CLEARS_LEFT = 0b0000_0000_0000_0000_1000_0000;
        #[doc = "Whether this flow clears to the right. This is checked all over layout, so a"]
        #[doc = "virtual call is too expensive."]
        const CLEARS_RIGHT = 0b0000_0000_0000_0001_0000_0000;
        #[doc = "Whether this flow is left-floated. This is checked all over layout, so a"]
        #[doc = "virtual call is too expensive."]
        const FLOATS_LEFT = 0b0000_0000_0000_0010_0000_0000;
        #[doc = "Whether this flow is right-floated. This is checked all over layout, so a"]
        #[doc = "virtual call is too expensive."]
        const FLOATS_RIGHT = 0b0000_0000_0000_0100_0000_0000;
        #[doc = "Text alignment. \
NB: If you update this, update `TEXT_ALIGN_SHIFT` below."]
        const TEXT_ALIGN = 0b0000_0000_0111_1000_0000_0000;
        #[doc = "Whether this flow has a fragment with `counter-reset` or `counter-increment` \
styles."]
        const AFFECTS_COUNTERS = 0b0000_0000_1000_0000_0000_0000;
        #[doc = "Whether this flow's descendants have fragments that affect `counter-reset` or \
`counter-increment` styles."]
        const HAS_COUNTER_AFFECTING_CHILDREN = 0b0000_0001_0000_0000_0000_0000;
        #[doc = "Whether this flow behaves as though it had `position: static` for the purposes \
of positioning in the inline direction. This is set for flows with `position: \
static` and `position: relative` as well as absolutely-positioned flows with \
unconstrained positions in the inline direction."]
        const INLINE_POSITION_IS_STATIC = 0b0000_0010_0000_0000_0000_0000;
        #[doc = "Whether this flow behaves as though it had `position: static` for the purposes \
of positioning in the block direction. This is set for flows with `position: \
static` and `position: relative` as well as absolutely-positioned flows with \
unconstrained positions in the block direction."]
        const BLOCK_POSITION_IS_STATIC = 0b0000_0100_0000_0000_0000_0000;
        /// Whether any ancestor is a fragmentation container
        const CAN_BE_FRAGMENTED = 0b0000_1000_0000_0000_0000_0000;
        /// Whether this flow contains any text and/or replaced fragments.
        const CONTAINS_TEXT_OR_REPLACED_FRAGMENTS = 0b0001_0000_0000_0000_0000_0000;
        /// Whether margins are prohibited from collapsing with this flow.
        const MARGINS_CANNOT_COLLAPSE = 0b0010_0000_0000_0000_0000_0000;
    }
}
/// The number of bits we must shift off to handle the text alignment field.
///
/// NB: If you update this, update `TEXT_ALIGN` above.
// A compile-time constant; `const` is the idiomatic declaration (there is no
// need for a single memory location, which is what `static` provides).
const TEXT_ALIGN_SHIFT: usize = 11;
impl FlowFlags {
    /// Extracts the text-align value packed into the `TEXT_ALIGN` bit range.
    #[inline]
    pub fn text_align(self) -> TextAlign {
        let packed = (self & FlowFlags::TEXT_ALIGN).bits() >> TEXT_ALIGN_SHIFT;
        TextAlign::from_u32(packed).unwrap()
    }

    /// Stores `value` into the `TEXT_ALIGN` bit range, replacing any previous value.
    #[inline]
    pub fn set_text_align(&mut self, value: TextAlign) {
        let encoded = FlowFlags::from_bits((value as u32) << TEXT_ALIGN_SHIFT).unwrap();
        *self = (*self & !FlowFlags::TEXT_ALIGN) | encoded;
    }

    /// The side this flow floats to, if any.
    #[inline]
    pub fn float_kind(&self) -> Float {
        match (
            self.contains(FlowFlags::FLOATS_LEFT),
            self.contains(FlowFlags::FLOATS_RIGHT),
        ) {
            (true, _) => Float::Left,
            (false, true) => Float::Right,
            (false, false) => Float::None,
        }
    }

    /// True if this flow is floated to either side.
    #[inline]
    pub fn is_float(&self) -> bool {
        self.intersects(FlowFlags::FLOATS_LEFT | FlowFlags::FLOATS_RIGHT)
    }

    /// True if this flow clears floats on either side.
    #[inline]
    pub fn clears_floats(&self) -> bool {
        self.intersects(FlowFlags::CLEARS_LEFT | FlowFlags::CLEARS_RIGHT)
    }
}
/// Absolutely-positioned descendants of this flow.
#[derive(Clone)]
pub struct AbsoluteDescendants {
    /// Links to every descendant. This must be private because it is unsafe to leak `FlowRef`s to
    /// layout.
    descendant_links: Vec<AbsoluteDescendantInfo>,
}
impl AbsoluteDescendants {
pub fn new() -> AbsoluteDescendants {
AbsoluteDescendants {
descendant_links: Vec::new(),
}
}
pub fn len(&self) -> usize {
self.descendant_links.len()
}
pub fn is_empty(&self) -> bool {
self.descendant_links.is_empty()
}
pub fn push(&mut self, given_descendant: FlowRef) {
self.descendant_links.push(AbsoluteDescendantInfo {
flow: given_descendant,
has_reached_containing_block: false,
});
}
/// Push the given descendants on to the existing descendants.
///
/// Ignore any static y offsets, because they are None before layout.
pub fn push_descendants(&mut self, given_descendants: AbsoluteDescendants) {
for elem in given_descendants.descendant_links {
self.descendant_links.push(elem);
}
}
/// Return an iterator over the descendant flows.
pub fn iter(&mut self) -> AbsoluteDescendantIter {
AbsoluteDescendantIter {
iter: self.descendant_links.iter_mut(),
}
}
/// Mark these descendants as having reached their containing block.
pub fn mark_as_having_reached_containing_block(&mut self) {
for descendant_info in self.descendant_links.iter_mut() {
descendant_info.has_reached_containing_block = true
}
}
}
/// Information about each absolutely-positioned descendant of the given flow.
#[derive(Clone)]
pub struct AbsoluteDescendantInfo {
    /// The absolute descendant flow in question.
    flow: FlowRef,

    /// Whether the absolute descendant has reached its containing block. This exists so that we
    /// can handle cases like the following:
    ///
    /// ```html
    /// <div>
    /// <span id=a style="position: absolute; ...">foo</span>
    /// <span style="position: relative">
    /// <span id=b style="position: absolute; ...">bar</span>
    /// </span>
    /// </div>
    /// ```
    ///
    /// When we go to create the `InlineFlow` for the outer `div`, our absolute descendants will
    /// be `a` and `b`. At this point, we need a way to distinguish between the two, because the
    /// containing block for `a` will be different from the containing block for `b`. Specifically,
    /// the latter's containing block is the inline flow itself, while the former's containing
    /// block is going to be some parent of the outer `div`. Hence we need this flag as a way to
    /// distinguish the two; it will be false for `a` and true for `b`.
    has_reached_containing_block: bool,
}
/// A mutable iterator over the flows in an `AbsoluteDescendants` set.
pub struct AbsoluteDescendantIter<'a> {
    iter: IterMut<'a, AbsoluteDescendantInfo>,
}
impl<'a> Iterator for AbsoluteDescendantIter<'a> {
    type Item = &'a mut dyn Flow;
    fn next(&mut self) -> Option<&'a mut dyn Flow> {
        // NOTE(review): `FlowRef::deref_mut` presumably yields a mutable
        // reference into the shared flow — confirm its aliasing guarantees in
        // `flow_ref.rs` before relying on them here.
        self.iter
            .next()
            .map(|info| FlowRef::deref_mut(&mut info.flow))
    }
    // Delegate so callers get the exact bounds of the underlying slice iterator.
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
/// Iterator pairing each absolute descendant with a mutable offset value.
pub type AbsoluteDescendantOffsetIter<'a> = Zip<AbsoluteDescendantIter<'a>, IterMut<'a, Au>>;
/// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
/// confused with absolutely-positioned flows) that is computed during block-size assignment.
#[derive(Clone, Copy)]
pub struct EarlyAbsolutePositionInfo {
    /// The size of the containing block for relatively-positioned descendants.
    pub relative_containing_block_size: LogicalSize<Au>,
    /// The writing mode for `relative_containing_block_size`.
    pub relative_containing_block_mode: WritingMode,
}
impl EarlyAbsolutePositionInfo {
    /// Creates position info for a flow with the given writing mode and a
    /// zero-sized relative containing block.
    pub fn new(writing_mode: WritingMode) -> EarlyAbsolutePositionInfo {
        // FIXME(pcwalton): The initial relative containing block-size should be equal to the size
        // of the root layer.
        let initial_size = LogicalSize::zero(writing_mode);
        EarlyAbsolutePositionInfo {
            relative_containing_block_mode: writing_mode,
            relative_containing_block_size: initial_size,
        }
    }
}
/// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
/// confused with absolutely-positioned flows) that is computed during final position assignment.
#[derive(Clone, Copy, Serialize)]
pub struct LateAbsolutePositionInfo {
    /// The position of the absolute containing block relative to the nearest ancestor stacking
    /// context. If the absolute containing block establishes the stacking context for this flow,
    /// and this flow is not itself absolutely-positioned, then this is (0, 0).
    pub stacking_relative_position_of_absolute_containing_block: Point2D<Au>,
}
impl LateAbsolutePositionInfo {
pub fn new() -> LateAbsolutePositionInfo {
LateAbsolutePositionInfo {
stacking_relative_position_of_absolute_containing_block: Point2D::zero(),
}
}
}
/// Parameters for fragmenting a flow across fragmentainers; see `Flow::fragment`.
#[derive(Clone, Copy, Debug)]
pub struct FragmentationContext {
    /// The block size available to the current fragment; content that does not
    /// fit is moved to a continuation flow (see `Flow::fragment`).
    pub available_block_size: Au,
    // NOTE(review): presumably true while the fragment being filled has
    // received no content yet — confirm against callers in block layout.
    pub this_fragment_is_empty: bool,
}
/// Data common to all flows.
pub struct BaseFlow {
    /// The accumulated restyle damage for this flow, i.e. how much of layout,
    /// display-list construction, and painting must be redone.
    pub restyle_damage: RestyleDamage,
    /// The children of this flow.
    pub children: FlowList,
    /// Intrinsic inline sizes for this flow.
    pub intrinsic_inline_sizes: IntrinsicISizes,
    /// The upper left corner of the box representing this flow, relative to the box representing
    /// its parent flow.
    ///
    /// For absolute flows, this represents the position with respect to its *containing block*.
    ///
    /// This does not include margins in the block flow direction, because those can collapse. So
    /// for the block direction (usually vertical), this represents the *border box*. For the
    /// inline direction (usually horizontal), this represents the *margin box*.
    pub position: LogicalRect<Au>,
    /// The amount of overflow of this flow, relative to the containing block. Must include all the
    /// pixels of all the display list items for correct invalidation.
    pub overflow: Overflow,
    /// Data used during parallel traversals.
    ///
    /// TODO(pcwalton): Group with other transient data to save space.
    pub parallel: FlowParallelInfo,
    /// The floats next to this flow.
    pub floats: Floats,
    /// Metrics for floats flowing *in*, computed during the float metrics speculation phase.
    pub speculated_float_placement_in: SpeculatedFloatPlacement,
    /// Metrics for floats flowing *out*, computed during the float metrics speculation phase.
    pub speculated_float_placement_out: SpeculatedFloatPlacement,
    /// The collapsible margins for this flow, if any.
    pub collapsible_margins: CollapsibleMargins,
    /// The position of this flow relative to the start of the nearest ancestor stacking context.
    /// This is computed during the top-down pass of display list construction.
    pub stacking_relative_position: Vector2D<Au>,
    /// Details about descendants with position 'absolute' or 'fixed' for which we are the
    /// containing block. This is in tree order. This includes any direct children.
    pub abs_descendants: AbsoluteDescendants,
    /// The inline-size of the block container of this flow. Used for computing percentage and
    /// automatic values for `width`.
    pub block_container_inline_size: Au,
    /// The writing mode of the block container of this flow.
    ///
    /// FIXME (mbrubeck): Combine this and block_container_inline_size and maybe
    /// block_container_explicit_block_size into a struct, to guarantee they are set at the same
    /// time? Or just store a link to the containing block flow.
    pub block_container_writing_mode: WritingMode,
    /// The block-size of the block container of this flow, if it is an explicit size (does not
    /// depend on content heights). Used for computing percentage values for `height`.
    pub block_container_explicit_block_size: Option<Au>,
    /// Reference to the Containing Block, if this flow is absolutely positioned.
    pub absolute_cb: ContainingBlockLink,
    /// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
    /// confused with absolutely-positioned flows) that is computed during block-size assignment.
    pub early_absolute_position_info: EarlyAbsolutePositionInfo,
    /// Information needed to compute absolute (i.e. viewport-relative) flow positions (not to be
    /// confused with absolutely-positioned flows) that is computed during final position
    /// assignment.
    pub late_absolute_position_info: LateAbsolutePositionInfo,
    /// The clipping rectangle for this flow and its descendants, in the coordinate system of the
    /// nearest ancestor stacking context. If this flow itself represents a stacking context, then
    /// this is in the flow's own coordinate system.
    pub clip: Rect<Au>,
    /// The writing mode for this flow.
    pub writing_mode: WritingMode,
    /// For debugging and profiling, the identifier of the thread that laid out this fragment.
    pub thread_id: u8,
    /// Various flags for flows, tightly packed to save space.
    pub flags: FlowFlags,
    /// The ID of the StackingContext that contains this flow. This is initialized
    /// to 0, but it assigned during the collect_stacking_contexts phase of display
    /// list construction.
    pub stacking_context_id: StackingContextId,
    /// The indices of this Flow's ClipScrollNode. This is used to place the node's
    /// display items into scrolling frames and clipping nodes.
    pub clipping_and_scrolling: Option<ClippingAndScrolling>,
}
impl fmt::Debug for BaseFlow {
    /// Formats a multi-line debug summary of this flow. Sections that carry no
    /// information (no children, no absolute descendants, no damage) are
    /// rendered as empty strings to keep flow-tree dumps compact.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let child_count = self.parallel.children_count.load(Ordering::SeqCst);
        let child_count_string = if child_count > 0 {
            format!("\nchildren={}", child_count)
        } else {
            "".to_owned()
        };
        let absolute_descendants_string = if self.abs_descendants.len() > 0 {
            format!("\nabs-descendents={}", self.abs_descendants.len())
        } else {
            "".to_owned()
        };
        let damage_string = if self.restyle_damage != RestyleDamage::empty() {
            format!("\ndamage={:?}", self.restyle_damage)
        } else {
            "".to_owned()
        };
        write!(
            f,
            "\nsc={:?}\
             \npos={:?}{}{}\
             \nfloatspec-in={:?}\
             \nfloatspec-out={:?}\
             \noverflow={:?}{}{}{}",
            self.stacking_context_id,
            self.position,
            // "FL"/"FR" markers flag flows floated to the left/right.
            if self.flags.contains(FlowFlags::FLOATS_LEFT) {
                "FL"
            } else {
                ""
            },
            if self.flags.contains(FlowFlags::FLOATS_RIGHT) {
                "FR"
            } else {
                ""
            },
            self.speculated_float_placement_in,
            self.speculated_float_placement_out,
            self.overflow,
            child_count_string,
            absolute_descendants_string,
            damage_string
        )
    }
}
impl Serialize for BaseFlow {
    /// Serializes a trimmed view of the flow (id, positions, intrinsic sizes and
    /// children) for layout-debugging output.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // The declared field count (5) must match the serialize_field calls below.
        let mut serializer = serializer.serialize_struct("base", 5)?;
        serializer.serialize_field("id", &self.debug_id())?;
        serializer.serialize_field(
            "stacking_relative_position",
            &self.stacking_relative_position,
        )?;
        serializer.serialize_field("intrinsic_inline_sizes", &self.intrinsic_inline_sizes)?;
        serializer.serialize_field("position", &self.position)?;
        serializer.serialize_field("children", &self.children)?;
        serializer.end()
    }
}
/// Whether a base flow should be forced to be nonfloated. This can affect e.g. `TableFlow`, which
/// is never floated because the table wrapper flow is the floated one.
//
// A fieldless two-variant enum: derive `Copy` (avoids a move at the `==`
// comparison in `BaseFlow::new`), `Debug` for diagnostics, and `Eq` to match
// the derived `PartialEq` (Clippy: `derive_partial_eq_without_eq`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ForceNonfloatedFlag {
    /// The flow should be floated if the node has a `float` property.
    FloatIfNecessary,
    /// The flow should be forced to be nonfloated.
    ForceNonfloated,
}
impl BaseFlow {
/// Constructs a fresh `BaseFlow`, deriving the initial flow flags from `style`.
///
/// `force_nonfloated` lets callers suppress floating regardless of the `float`
/// property (e.g. table flows, whose *wrapper* flow is the floated one).
#[inline]
pub fn new(
    style: Option<&ComputedValues>,
    writing_mode: WritingMode,
    force_nonfloated: ForceNonfloatedFlag,
) -> BaseFlow {
    let mut flags = FlowFlags::empty();
    match style {
        Some(style) => {
            if style.can_be_fragmented() {
                flags.insert(FlowFlags::CAN_BE_FRAGMENTED);
            }
            match style.get_box().position {
                Position::Absolute | Position::Fixed => {
                    flags.insert(FlowFlags::IS_ABSOLUTELY_POSITIONED);
                    let logical_position = style.logical_position();
                    // An absolutely-positioned box is statically positioned on an
                    // axis only when *both* offsets on that axis are `auto`.
                    if logical_position.inline_start == LengthOrPercentageOrAuto::Auto &&
                        logical_position.inline_end == LengthOrPercentageOrAuto::Auto
                    {
                        flags.insert(FlowFlags::INLINE_POSITION_IS_STATIC);
                    }
                    if logical_position.block_start == LengthOrPercentageOrAuto::Auto &&
                        logical_position.block_end == LengthOrPercentageOrAuto::Auto
                    {
                        flags.insert(FlowFlags::BLOCK_POSITION_IS_STATIC);
                    }
                },
                // Non-absolute flows are always statically positioned on both axes.
                _ => flags.insert(
                    FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC,
                ),
            }
            if force_nonfloated == ForceNonfloatedFlag::FloatIfNecessary {
                match style.get_box().float {
                    Float::None => {},
                    Float::Left => flags.insert(FlowFlags::FLOATS_LEFT),
                    Float::Right => flags.insert(FlowFlags::FLOATS_RIGHT),
                }
            }
            match style.get_box().clear {
                Clear::None => {},
                Clear::Left => flags.insert(FlowFlags::CLEARS_LEFT),
                Clear::Right => flags.insert(FlowFlags::CLEARS_RIGHT),
                Clear::Both => {
                    flags.insert(FlowFlags::CLEARS_LEFT);
                    flags.insert(FlowFlags::CLEARS_RIGHT);
                },
            }
            if !style.get_counters().counter_reset.is_empty() ||
                !style.get_counters().counter_increment.is_empty()
            {
                flags.insert(FlowFlags::AFFECTS_COUNTERS)
            }
        },
        // Anonymous flows (no style) are statically positioned on both axes.
        None => flags
            .insert(FlowFlags::BLOCK_POSITION_IS_STATIC | FlowFlags::INLINE_POSITION_IS_STATIC),
    }
    // New flows start out as fully damaged...
    let mut damage = RestyleDamage::rebuild_and_reflow();
    // ...except that the flow itself was just constructed, so it does not need
    // to be reconstructed again.
    damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW);
    BaseFlow {
        restyle_damage: damage,
        children: FlowList::new(),
        intrinsic_inline_sizes: IntrinsicISizes::new(),
        position: LogicalRect::zero(writing_mode),
        overflow: Overflow::new(),
        parallel: FlowParallelInfo::new(),
        floats: Floats::new(writing_mode),
        collapsible_margins: CollapsibleMargins::new(),
        stacking_relative_position: Vector2D::zero(),
        abs_descendants: AbsoluteDescendants::new(),
        speculated_float_placement_in: SpeculatedFloatPlacement::zero(),
        speculated_float_placement_out: SpeculatedFloatPlacement::zero(),
        block_container_inline_size: Au(0),
        block_container_writing_mode: writing_mode,
        block_container_explicit_block_size: None,
        absolute_cb: ContainingBlockLink::new(),
        early_absolute_position_info: EarlyAbsolutePositionInfo::new(writing_mode),
        late_absolute_position_info: LateAbsolutePositionInfo::new(),
        // Start with an unbounded clip; display-list construction narrows it.
        clip: MaxRect::max_rect(),
        flags: flags,
        writing_mode: writing_mode,
        thread_id: 0,
        stacking_context_id: StackingContextId::root(),
        clipping_and_scrolling: None,
    }
}
/// Update the 'flags' field when computed styles have changed.
///
/// These flags are initially set during flow construction. They only need to be updated here
/// if they are based on properties that can change without triggering `RECONSTRUCT_FLOW`.
pub fn update_flags_if_needed(&mut self, style: &ComputedValues) {
// For absolutely-positioned flows, changes to top/bottom/left/right can cause these flags
// to get out of date:
if self
.restyle_damage
.contains(ServoRestyleDamage::REFLOW_OUT_OF_FLOW)
{
// Note: We don't need to check whether IS_ABSOLUTELY_POSITIONED has changed, because
// changes to the 'position' property trigger flow reconstruction.
if self.flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED) {
let logical_position = style.logical_position();<|fim▁hole|> logical_position.inline_end == LengthOrPercentageOrAuto::Auto,
);
self.flags.set(
FlowFlags::BLOCK_POSITION_IS_STATIC,
logical_position.block_start == LengthOrPercentageOrAuto::Auto &&
logical_position.block_end == LengthOrPercentageOrAuto::Auto,
);
}
}
}
/// Return a new BaseFlow like this one but with the given children list.
pub fn clone_with_children(&self, children: FlowList) -> BaseFlow {
    BaseFlow {
        children: children,
        // The clone's subtree changed, so force repaint and reflow on it.
        restyle_damage: self.restyle_damage |
            ServoRestyleDamage::REPAINT |
            ServoRestyleDamage::REFLOW_OUT_OF_FLOW |
            ServoRestyleDamage::REFLOW,
        // Transient parallel-traversal bookkeeping must not be shared.
        parallel: FlowParallelInfo::new(),
        floats: self.floats.clone(),
        abs_descendants: self.abs_descendants.clone(),
        absolute_cb: self.absolute_cb.clone(),
        clip: self.clip.clone(),
        ..*self
    }
}
/// Iterates over the children of this immutable flow.
pub fn child_iter(&self) -> FlowListIterator {
    self.children.iter()
}
/// Iterates mutably over the children of this flow.
pub fn child_iter_mut(&mut self) -> MutFlowListIterator {
    self.children.iter_mut()
}
/// Returns an opaque, per-instance identifier for this flow (its address),
/// suitable for debug output. It is stable only for the flow's lifetime and
/// must never be dereferenced.
pub fn debug_id(&self) -> usize {
    self as *const BaseFlow as usize
}

/// Returns the identifier of this flow; identical to [`BaseFlow::debug_id`].
pub fn flow_id(&self) -> usize {
    // Delegate rather than duplicating the pointer cast; also drops the
    // non-idiomatic trailing `return` the original used.
    self.debug_id()
}
/// Runs the stacking-context collection phase of display-list construction
/// over each of this flow's children.
pub fn collect_stacking_contexts_for_children(
    &mut self,
    state: &mut StackingContextCollectionState,
) {
    for kid in self.children.iter_mut() {
        kid.collect_stacking_contexts(state);
    }
}
/// True if the float-speculation phase predicted floats (on either side)
/// flowing *into* this flow.
#[inline]
pub fn might_have_floats_in(&self) -> bool {
    self.speculated_float_placement_in.left > Au(0) ||
        self.speculated_float_placement_in.right > Au(0)
}
/// True if the float-speculation phase predicted floats (on either side)
/// flowing *out of* this flow.
#[inline]
pub fn might_have_floats_out(&self) -> bool {
    self.speculated_float_placement_out.left > Au(0) ||
        self.speculated_float_placement_out.right > Au(0)
}
/// Compute the fragment position relative to the parent stacking context. If the fragment
/// itself establishes a stacking context, then the origin of its position will be (0, 0)
/// for the purposes of this computation.
pub fn stacking_relative_border_box_for_display_list(&self, fragment: &Fragment) -> Rect<Au> {
    fragment.stacking_relative_border_box(
        &self.stacking_relative_position,
        &self
            .early_absolute_position_info
            .relative_containing_block_size,
        self.early_absolute_position_info
            .relative_containing_block_mode,
        // `Own`: coordinates in the fragment's own stacking context, per above.
        CoordinateSystem::Own,
    )
}
}
impl<'a> ImmutableFlowUtils for &'a dyn Flow {
/// Returns true if this flow is a block flow or subclass thereof.
fn is_block_like(self) -> bool {
    self.class().is_block_like()
}
/// Returns true if this flow is a proper table child.
/// 'Proper table child' is defined as table-row flow, table-rowgroup flow,
/// table-column-group flow, or table-caption flow.
fn is_proper_table_child(self) -> bool {
    match self.class() {
        FlowClass::TableRow |
        FlowClass::TableRowGroup |
        FlowClass::TableColGroup |
        FlowClass::TableCaption => true,
        _ => false,
    }
}
/// Returns true if this flow is a table row flow.
fn is_table_row(self) -> bool {
    match self.class() {
        FlowClass::TableRow => true,
        _ => false,
    }
}
/// Returns true if this flow is a table cell flow.
fn is_table_cell(self) -> bool {
    match self.class() {
        FlowClass::TableCell => true,
        _ => false,
    }
}
/// Returns true if this flow is a table colgroup flow.
fn is_table_colgroup(self) -> bool {
    match self.class() {
        FlowClass::TableColGroup => true,
        _ => false,
    }
}
/// Returns true if this flow is a table flow.
fn is_table(self) -> bool {
    match self.class() {
        FlowClass::Table => true,
        _ => false,
    }
}
/// Returns true if this flow is a table caption flow.
fn is_table_caption(self) -> bool {
    match self.class() {
        FlowClass::TableCaption => true,
        _ => false,
    }
}
/// Returns true if this flow is a table rowgroup flow.
fn is_table_rowgroup(self) -> bool {
    match self.class() {
        FlowClass::TableRowGroup => true,
        _ => false,
    }
}
/// Returns true if this flow is one of table-related flows.
fn is_table_kind(self) -> bool {
    match self.class() {
        FlowClass::TableWrapper |
        FlowClass::Table |
        FlowClass::TableColGroup |
        FlowClass::TableRowGroup |
        FlowClass::TableRow |
        FlowClass::TableCaption |
        FlowClass::TableCell => true,
        _ => false,
    }
}
/// Returns true if this flow has no children.
fn is_leaf(self) -> bool {
    self.base().children.is_empty()
}
/// Returns the number of children that this flow possesses.
fn child_count(self) -> usize {
    self.base().children.len()
}
/// Return true if this flow is a Block Container.
///
/// Except for table fragments and replaced elements, block-level fragments (`BlockFlow`) are
/// also block container fragments.
/// Non-replaced inline blocks and non-replaced table cells are also block
/// containers.
fn is_block_container(self) -> bool {
    match self.class() {
        // TODO: Change this when inline-blocks are supported.
        FlowClass::Block | FlowClass::TableCaption | FlowClass::TableCell => {
            // FIXME: Actually check the type of the node; a childless flow is
            // treated as not being a block container here.
            self.child_count() != 0
        },
        _ => false,
    }
}
/// Returns true if this flow is a block flow.
fn is_block_flow(self) -> bool {
    match self.class() {
        FlowClass::Block => true,
        _ => false,
    }
}
/// Returns true if this flow is an inline flow.
fn is_inline_flow(self) -> bool {
    match self.class() {
        FlowClass::Inline => true,
        _ => false,
    }
}
/// Dumps the flow tree for debugging.
fn print(self, title: String) {
    let mut print_tree = PrintTree::new(title);
    self.print_with_tree(&mut print_tree);
}
/// Dumps the flow tree for debugging into the given PrintTree.
fn print_with_tree(self, print_tree: &mut PrintTree) {
    print_tree.new_level(format!("{:?}", self));
    // Let specific flow kinds print extra, type-specific children first.
    self.print_extra_flow_children(print_tree);
    for kid in self.base().child_iter() {
        kid.print_with_tree(print_tree);
    }
    print_tree.end_level();
}
/// Returns true if floats could flow through this flow, so float state must
/// be threaded through it during layout.
fn floats_might_flow_through(self) -> bool {
    // No speculated floats in or out: nothing can flow through.
    if !self.base().might_have_floats_in() && !self.base().might_have_floats_out() {
        return false;
    }
    if self.is_root() {
        return false;
    }
    if !self.is_block_like() {
        return true;
    }
    // Only block flows that establish no block formatting context
    // (`FormattingContextType::None`) let floats pass through.
    self.as_block().formatting_context_type() == FormattingContextType::None
}
/// Returns the baseline offset of the last line box in this flow's subtree,
/// if any, expressed relative to this flow's position (the child's block-start
/// offset is added at each level of the recursion).
fn baseline_offset_of_last_line_box_in_flow(self) -> Option<Au> {
    // Walk the children in reverse so the *last* line box is found first.
    for kid in self.base().children.iter().rev() {
        if kid.is_inline_flow() {
            if let Some(baseline_offset) = kid.as_inline().baseline_offset_of_last_line() {
                return Some(kid.base().position.start.b + baseline_offset);
            }
        }
        // Absolutely-positioned children are out of flow and do not contribute.
        if kid.is_block_like() && !kid
            .base()
            .flags
            .contains(FlowFlags::IS_ABSOLUTELY_POSITIONED)
        {
            if let Some(baseline_offset) = kid.baseline_offset_of_last_line_box_in_flow() {
                return Some(kid.base().position.start.b + baseline_offset);
            }
        }
    }
    None
}
}
impl<'a> MutableFlowUtils for &'a mut dyn Flow {
    /// Calls `repair_style` and `bubble_inline_sizes`. You should use this method instead of
    /// calling them individually, since there is no reason not to perform both operations.
    fn repair_style_and_bubble_inline_sizes(self, style: &crate::ServoArc<ComputedValues>) {
        self.repair_style(style);
        // Style-derived flags (e.g. static-position bits) may now be stale.
        self.mut_base().update_flags_if_needed(style);
        self.bubble_inline_sizes();
    }
}
impl MutableOwnedFlowUtils for FlowRef {
    /// Set absolute descendants for this flow.
    ///
    /// Set yourself as the Containing Block for all the absolute descendants.
    ///
    /// This is called during flow construction, so nothing else can be accessing the descendant
    /// flows. This is enforced by the fact that we have a mutable `FlowRef`, which only flow
    /// construction is allowed to possess.
    fn set_absolute_descendants(&mut self, abs_descendants: AbsoluteDescendants) {
        // Clone the handle up front: `self` is mutably borrowed below, and each
        // descendant needs its own containing-block link back to this flow.
        let this = self.clone();
        let base = FlowRef::deref_mut(self).mut_base();
        base.abs_descendants = abs_descendants;
        for descendant_link in base.abs_descendants.descendant_links.iter_mut() {
            // Descendants that already reached their containing block should
            // have been claimed by take_applicable_absolute_descendants.
            debug_assert!(!descendant_link.has_reached_containing_block);
            let descendant_base = FlowRef::deref_mut(&mut descendant_link.flow).mut_base();
            descendant_base.absolute_cb.set(this.clone());
        }
    }
    /// Sets the flow as the containing block for all absolute descendants that have been marked
    /// as having reached their containing block. This is needed in order to handle cases like:
    ///
    /// ```html
    /// <div>
    ///     <span style="position: relative">
    ///         <span style="position: absolute; ..."></span>
    ///     </span>
    /// </div>
    /// ```
    fn take_applicable_absolute_descendants(
        &mut self,
        absolute_descendants: &mut AbsoluteDescendants,
    ) {
        // Partition the incoming set: adopt the descendants that have reached
        // their containing block, and leave the rest for an ancestor to claim.
        let mut applicable_absolute_descendants = AbsoluteDescendants::new();
        for absolute_descendant in absolute_descendants.descendant_links.iter() {
            if absolute_descendant.has_reached_containing_block {
                applicable_absolute_descendants.push(absolute_descendant.flow.clone());
            }
        }
        absolute_descendants
            .descendant_links
            .retain(|descendant| !descendant.has_reached_containing_block);
        let this = self.clone();
        let base = FlowRef::deref_mut(self).mut_base();
        base.abs_descendants = applicable_absolute_descendants;
        for descendant_link in base.abs_descendants.iter() {
            let descendant_base = descendant_link.mut_base();
            descendant_base.absolute_cb.set(this.clone());
        }
    }
}
/// A link to a flow's containing block.
///
/// This cannot safely be a `Flow` pointer because this is a pointer *up* the tree, not *down* the
/// tree. A pointer up the tree is unsafe during layout because it can be used to access a node
/// with an immutable reference while that same node is being laid out, causing possible iterator
/// invalidation and use-after-free.
///
/// FIXME(pcwalton): I think this would be better with a borrow flag instead of `unsafe`.
#[derive(Clone)]
pub struct ContainingBlockLink {
    /// The pointer up to the containing block.
    link: Option<WeakFlowRef>,
}
impl ContainingBlockLink {
    /// Creates an unset link; `set` establishes it during flow construction.
    fn new() -> ContainingBlockLink {
        ContainingBlockLink { link: None }
    }
    fn set(&mut self, link: FlowRef) {
        // Store only a weak reference: this pointer goes *up* the tree (see the
        // struct-level comment), so a strong reference would create a cycle.
        self.link = Some(FlowRef::downgrade(&link))
    }
    /// Asks the containing block for the size it generates for `for_flow`.
    ///
    /// Panics if the link was never established.
    #[inline]
    pub fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> {
        match self.link {
            None => panic!(
                "Link to containing block not established; perhaps you forgot to call \
                 `set_absolute_descendants`?"
            ),
            Some(ref link) => {
                let flow = link.upgrade().unwrap();
                flow.generated_containing_block_size(for_flow)
            },
        }
    }
    /// Returns the containing block's explicit block size, if it has one.
    ///
    /// Panics if the link was never established.
    #[inline]
    pub fn explicit_block_containing_size(
        &self,
        shared_context: &SharedStyleContext,
    ) -> Option<Au> {
        match self.link {
            None => panic!(
                "Link to containing block not established; perhaps you forgot to call \
                 `set_absolute_descendants`?"
            ),
            Some(ref link) => {
                let flow = link.upgrade().unwrap();
                if flow.is_block_like() {
                    flow.as_block()
                        .explicit_block_containing_size(shared_context)
                } else if flow.is_inline_flow() {
                    Some(flow.as_inline().minimum_line_metrics.space_above_baseline)
                } else {
                    None
                }
            },
        }
    }
}
/// A wrapper for the pointer address of a flow. These pointer addresses may only be compared for
/// equality with other such pointer addresses, never dereferenced.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct OpaqueFlow(pub usize);
impl OpaqueFlow {
pub fn from_flow(flow: &dyn Flow) -> OpaqueFlow {
let object_ptr: *const dyn Flow = flow;
let data_ptr = object_ptr as *const ();
OpaqueFlow(data_ptr as usize)
}
}<|fim▁end|>
|
self.flags.set(
FlowFlags::INLINE_POSITION_IS_STATIC,
logical_position.inline_start == LengthOrPercentageOrAuto::Auto &&
|
<|file_name|>sconstest-singleStringCannotBeMultipleOptions_dmd.py<|end_file_name|><|fim▁begin|>"""
Test compiling and executing using the dmd tool.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
from Common.singleStringCannotBeMultipleOptions import testForTool
# Run the shared "single string cannot be multiple options" check for dmd.
testForTool('dmd')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
|
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
|
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)]
#![plugin(rustc_info)]
fn main() {
println!("This project was compiled with rustc release {} ({} {}) for {} target",
rustc_release!(),
rustc_commit_hash!(),
rustc_commit_date!(), <|fim▁hole|>}<|fim▁end|>
|
rustc_host!());
|
<|file_name|>org-groups.js<|end_file_name|><|fim▁begin|>import React, { Component, Fragment } from 'react';
import { navigate } from '@reach/router';
import PropTypes from 'prop-types';
import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap';
import { siteRoot, gettext, orgID } from '../../utils/constants';
import { seafileAPI } from '../../utils/seafile-api';
import { Utils } from '../../utils/utils';
import toaster from '../../components/toast';
import OrgGroupInfo from '../../models/org-group';
import MainPanelTopbar from './main-panel-topbar';
class Search extends React.Component {
constructor(props) {
super(props);
this.state = {
value: ''
};
}
handleInputChange = (e) => {
this.setState({
value: e.target.value
});
}
handleKeyPress = (e) => {
if (e.key == 'Enter') {
e.preventDefault();
this.handleSubmit();
}
}
handleSubmit = () => {
const value = this.state.value.trim();
if (!value) {
return false;
}
this.props.submit(value);
}
render() {
return (
<div className="input-icon">
<i className="d-flex input-icon-addon fas fa-search"></i>
<input
type="text"
className="form-control search-input h-6 mr-1"
style={{width: '15rem'}}
placeholder={this.props.placeholder}
value={this.state.value}
onChange={this.handleInputChange}
onKeyPress={this.handleKeyPress}
autoComplete="off"
/><|fim▁hole|> );
}
}
class OrgGroups extends Component {
constructor(props) {
super(props);
this.state = {
page: 1,
pageNext: false,
orgGroups: [],
isItemFreezed: false
};
}
componentDidMount() {
let page = this.state.page;
this.initData(page);
}
initData = (page) => {
seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => {
let orgGroups = res.data.groups.map(item => {
return new OrgGroupInfo(item);
});
this.setState({
orgGroups: orgGroups,
pageNext: res.data.page_next,
page: res.data.page,
});
}).catch(error => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
});
}
onChangePageNum = (e, num) => {
e.preventDefault();
let page = this.state.page;
if (num == 1) {
page = page + 1;
} else {
page = page - 1;
}
this.initData(page);
}
onFreezedItem = () => {
this.setState({isItemFreezed: true});
}
onUnfreezedItem = () => {
this.setState({isItemFreezed: false});
}
deleteGroupItem = (group) => {
seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => {
this.setState({
orgGroups: this.state.orgGroups.filter(item => item.id != group.id)
});
let msg = gettext('Successfully deleted {name}');
msg = msg.replace('{name}', group.groupName);
toaster.success(msg);
}).catch(error => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
});
}
searchItems = (keyword) => {
navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`);
}
getSearch = () => {
return <Search
placeholder={gettext('Search groups by name')}
submit={this.searchItems}
/>;
}
render() {
let groups = this.state.orgGroups;
return (
<Fragment>
<MainPanelTopbar search={this.getSearch()}/>
<div className="main-panel-center flex-row">
<div className="cur-view-container">
<div className="cur-view-path">
<h3 className="sf-heading">{gettext('All Groups')}</h3>
</div>
<div className="cur-view-content">
<table>
<thead>
<tr>
<th width="30%">{gettext('Name')}</th>
<th width="35%">{gettext('Creator')}</th>
<th width="23%">{gettext('Created At')}</th>
<th width="12%" className="text-center">{gettext('Operations')}</th>
</tr>
</thead>
<tbody>
{groups.map(item => {
return (
<GroupItem
key={item.id}
group={item}
isItemFreezed={this.state.isItemFreezed}
onFreezedItem={this.onFreezedItem}
onUnfreezedItem={this.onUnfreezedItem}
deleteGroupItem={this.deleteGroupItem}
/>
);
})}
</tbody>
</table>
<div className="paginator">
{this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>}
{(this.state.page != 1 && this.state.pageNext) && <span> | </span>}
{this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>}
</div>
</div>
</div>
</div>
</Fragment>
);
}
}
const GroupItemPropTypes = {
group: PropTypes.object.isRequired,
isItemFreezed: PropTypes.bool.isRequired,
onFreezedItem: PropTypes.func.isRequired,
onUnfreezedItem: PropTypes.func.isRequired,
deleteGroupItem: PropTypes.func.isRequired,
};
class GroupItem extends React.Component {
constructor(props) {
super(props);
this.state = {
highlight: false,
showMenu: false,
isItemMenuShow: false
};
}
onMouseEnter = () => {
if (!this.props.isItemFreezed) {
this.setState({
showMenu: true,
highlight: true,
});
}
}
onMouseLeave = () => {
if (!this.props.isItemFreezed) {
this.setState({
showMenu: false,
highlight: false
});
}
}
onDropdownToggleClick = (e) => {
e.preventDefault();
this.toggleOperationMenu(e);
}
toggleOperationMenu = (e) => {
e.stopPropagation();
this.setState(
{isItemMenuShow: !this.state.isItemMenuShow }, () => {
if (this.state.isItemMenuShow) {
this.props.onFreezedItem();
} else {
this.setState({
highlight: false,
showMenu: false,
});
this.props.onUnfreezedItem();
}
}
);
}
toggleDelete = () => {
this.props.deleteGroupItem(this.props.group);
}
renderGroupHref = (group) => {
let groupInfoHref;
if (group.creatorName == 'system admin') {
groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/';
} else {
groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/';
}
return groupInfoHref;
}
renderGroupCreator = (group) => {
let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/';
if (group.creatorName == 'system admin') {
return (
<td> -- </td>
);
} else {
return(
<td>
<a href={userInfoHref} className="font-weight-normal">{group.creatorName}</a>
</td>
);
}
}
render() {
let { group } = this.props;
let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu;
return (
<tr className={this.state.highlight ? 'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}>
<td>
<a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a>
</td>
{this.renderGroupCreator(group)}
<td>{group.ctime}</td>
<td className="text-center cursor-pointer">
{isOperationMenuShow &&
<Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}>
<DropdownToggle
tag="a"
className="attr-action-icon fas fa-ellipsis-v"
title={gettext('More Operations')}
data-toggle="dropdown"
aria-expanded={this.state.isItemMenuShow}
onClick={this.onDropdownToggleClick}
/>
<DropdownMenu>
<DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem>
</DropdownMenu>
</Dropdown>
}
</td>
</tr>
);
}
}
GroupItem.propTypes = GroupItemPropTypes;
export default OrgGroups;<|fim▁end|>
|
</div>
|
<|file_name|>__DangerfileAsync.ts<|end_file_name|><|fim▁begin|>const asyncAction = () =>
new Promise(res => {
setTimeout(() => {<|fim▁hole|>
schedule(async () => {
await asyncAction()
warn("After Async Function")
})<|fim▁end|>
|
warn("Async Function")
res()
}, 50)
})
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
declare module 'json-gate'
|
<|file_name|>OpenshiftBuildConfigsProducer.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openshift.build_configs;
import java.util.Map;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.dsl.FilterWatchListMultiDeletable;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.api.model.DoneableBuildConfig;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.dsl.BuildConfigResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OpenshiftBuildConfigsProducer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(OpenshiftBuildConfigsProducer.class);
public OpenshiftBuildConfigsProducer(AbstractKubernetesEndpoint endpoint) {
super(endpoint);
}
@Override
public AbstractKubernetesEndpoint getEndpoint() {
return (AbstractKubernetesEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
String operation;
if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration().getOperation())) {
operation = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class);
} else {
operation = getEndpoint().getKubernetesConfiguration().getOperation();
}
switch (operation) {
case KubernetesOperations.LIST_BUILD_CONFIGS:
doList(exchange, operation);
break;
case KubernetesOperations.LIST_BUILD_CONFIGS_BY_LABELS_OPERATION:
doListBuildConfigsByLabels(exchange, operation);
break;
case KubernetesOperations.GET_BUILD_CONFIG_OPERATION:
doGetBuildConfig(exchange, operation);
break;
default:
throw new IllegalArgumentException("Unsupported operation " + operation);
}
}
protected void doList(Exchange exchange, String operation) throws Exception {
BuildConfigList buildConfigsList
= getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace().list();
exchange.getOut().setBody(buildConfigsList.getItems());
}
protected void doListBuildConfigsByLabels(Exchange exchange, String operation) throws Exception {
BuildConfigList buildConfigsList = null;
Map<String, String> labels = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIGS_LABELS, Map.class);
String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (!ObjectHelper.isEmpty(namespaceName)) {
NonNamespaceOperation<BuildConfig, BuildConfigList, DoneableBuildConfig, BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> buildConfigs;
buildConfigs = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs()
.inNamespace(namespaceName);
for (Map.Entry<String, String> entry : labels.entrySet()) {
buildConfigs.withLabel(entry.getKey(), entry.getValue());
}
buildConfigsList = buildConfigs.list();
} else {
FilterWatchListMultiDeletable<BuildConfig, BuildConfigList, Boolean, Watch> buildConfigs
= getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace();
for (Map.Entry<String, String> entry : labels.entrySet()) {
buildConfigs.withLabel(entry.getKey(), entry.getValue());
}
buildConfigsList = buildConfigs.list();
}
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getOut().setBody(buildConfigsList.getItems());
}
protected void doGetBuildConfig(Exchange exchange, String operation) throws Exception {
BuildConfig buildConfig = null;
String buildConfigName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIG_NAME, String.class);
String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (ObjectHelper.isEmpty(buildConfigName)) {
LOG.error("Get a specific Build Config require specify a Build Config name");
throw new IllegalArgumentException("Get a specific Build Config require specify a Build Config name");
}
if (ObjectHelper.isEmpty(namespaceName)) {
LOG.error("Get a specific Build Config require specify a namespace name");
throw new IllegalArgumentException("Get a specific Build Config require specify a namespace name");
}
buildConfig = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inNamespace(namespaceName)
.withName(buildConfigName).get();
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getOut().setBody(buildConfig);
}
}<|fim▁end|>
|
* The ASF licenses this file to You under the Apache License, Version 2.0
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import card
from card import Card
from player import Player
from hand import Hand
from prompt import Prompt, IntegerPrompt, SetPrompt
import pprint
class Setup:
    """Interactive setup for a game: builds the deck, the hidden solution
    hand ("conviction"), this player, and all opponents.

    This block was corrupted by stray fill-in-the-middle markers that
    displaced three room cards out of ``initialize_cards``; the method is
    reconstructed here with the full standard 9-room deck.
    """

    def run(self, game):
        """Run the complete setup sequence, populating ``game`` in place."""
        self.game = game
        # Running total of cards whose location has been assigned so far
        # (conviction cards + cards dealt to each player).
        self.cards_accounted_for = 0
        self.setup_conviction()
        self.initialize_cards()
        self.setup_me()
        self.setup_opponents()
        self.setup_my_cards()

    def setup_conviction(self):
        """Create the hidden solution hand: one card of each type."""
        self.game.conviction = Hand(card.COUNT_TYPES, game=self.game)
        self.game.hands.add(self.game.conviction)
        self.cards_accounted_for += card.COUNT_TYPES

    def initialize_cards(self):
        """Register the standard deck: 9 rooms, 6 suspects, 6 weapons."""
        self.game.cards.add(Card(card.TYPE_ROOM, 'Lounge'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Dining Room'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Kitchen'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Ballroom'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Conservatory'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Billiard Room'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Library'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Study'))
        self.game.cards.add(Card(card.TYPE_ROOM, 'Hall'))
        # Suspect names preserved exactly as elsewhere in the project,
        # including the historical misspellings.
        self.game.cards.add(Card(card.TYPE_PERSON, 'Miss Scarlett'))
        self.game.cards.add(Card(card.TYPE_PERSON, 'Coloniel Mustard'))
        self.game.cards.add(Card(card.TYPE_PERSON, 'Misses White'))
        self.game.cards.add(Card(card.TYPE_PERSON, 'Mister Green'))
        self.game.cards.add(Card(card.TYPE_PERSON, 'Misses Peacock'))
        self.game.cards.add(Card(card.TYPE_PERSON, 'Professor Plumb'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Lead Pipe'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Wrench'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Knife'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Revolver'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Candlestick'))
        self.game.cards.add(Card(card.TYPE_WEAPON, 'Rope'))

    def setup_me(self):
        """Prompt for this player's name and card count, and register them."""
        name = self.game.prompt(Prompt('Your name:'))
        # Upper bound: only the cards not already accounted for can be held.
        card_count = self.game.prompt(IntegerPrompt('Count your cards:', len(self.game.cards) - self.cards_accounted_for))
        player = Player(name, Hand(card_count, game=self.game))
        self.game.hands.add(player.hand)
        self.game.me = player
        self.game.players.add(player)
        self.cards_accounted_for += card_count

    def setup_opponents(self):
        """Prompt for opponents until every card in the deck is accounted for."""
        while self.cards_accounted_for < len(self.game.cards):
            cards_left = len(self.game.cards) - self.cards_accounted_for
            name = self.game.prompt(Prompt('Opponent name:'))
            card_count = self.game.prompt(IntegerPrompt(
                'Cards held by {} ({} left):'.format(
                    name,
                    cards_left
                ),
                cards_left
            ))
            player = Player(name, Hand(card_count, game=self.game))
            self.game.hands.add(player.hand)
            self.game.players.add(player)
            self.cards_accounted_for += card_count

    def setup_my_cards(self):
        """Prompt for each card in my hand, excluding ones already entered."""
        while len(self.game.me.hand.has_set) < self.game.me.hand.count:
            self.game.me.hand.has(self.game.prompt(SetPrompt('Your card:', self.game.cards, exclude=self.game.me.hand.has_set)))
<|file_name|>test_mapping_index_converter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
# Copyright (C) Alibaba Cloud Computing
# All rights reserved.
import unittest
from aliyun.log.es_migration.mapping_index_converter import \<|fim▁hole|>
class TestMappingIndexConverter(unittest.TestCase):
    """Unit tests for MappingIndexConverter.to_index_config.

    The converter translates an Elasticsearch mapping (the ``properties``
    tree) into an Aliyun Log Service index configuration.
    """

    def test_to_index_config(self):
        """A mapping covering every supported ES field type converts without
        error, and the resulting full-text line config uses the converter's
        default token list with Chinese tokenization enabled."""
        # Fixture: one field per Elasticsearch type, including nested
        # objects, range types, geo types, and date formats. The exact
        # contents matter — keep in sync with the converter's type table.
        mapping = {
            "properties": {
                "es_text": {
                    "type": "text"
                },
                "es_keyword": {
                    "type": "keyword"
                },
                "es_long": {
                    "type": "long"
                },
                "es_integer": {
                    "type": "integer"
                },
                "es_short": {
                    "type": "short"
                },
                "es_byte": {
                    "type": "byte"
                },
                "es_double": {
                    "type": "double"
                },
                "es_float": {
                    "type": "float"
                },
                "es_half_float": {
                    "type": "half_float"
                },
                "es_scaled_float": {
                    "type": "scaled_float",
                    "scaling_factor": 100
                },
                "es_date": {
                    "type": "date",
                    "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
                },
                "es_boolean": {
                    "type": "boolean"
                },
                "es_binary": {
                    "type": "binary"
                },
                "es_integer_range": {
                    "type": "integer_range"
                },
                "es_float_range": {
                    "type": "float_range"
                },
                "es_long_range": {
                    "type": "long_range"
                },
                "es_double_range": {
                    "type": "double_range"
                },
                "es_date_range": {
                    "type": "date_range",
                    "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
                },
                "es_ip_range": {
                    "type": "ip_range"
                },
                "es_object": {
                    "properties": {
                        "sub_text": {"type": "text"},
                        "sub_long": {"type": "long"},
                        "sub_double": {"type": "double"},
                        "sub_boolean": {"type": "boolean"},
                        "sub_date": {
                            "type": "date",
                            "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"
                        },
                        "sub_byte": {"type": "byte"},
                        "sub_double_range": {
                            "type": "double_range"
                        },
                        "sub_object": {
                            "properties": {
                                "sub_text": {"type": "text"},
                                "sub_boolean": {"type": "boolean"}
                            }
                        }
                    }
                },
                "es_geo_point": {
                    "type": "geo_point"
                },
                "es_geo_shape": {
                    "type": "geo_shape"
                }
            }
        }

        index_config = MappingIndexConverter.to_index_config(mapping)

        # Only the full-text line config is asserted here; per-field key
        # configs are exercised implicitly by the conversion not raising.
        line_config = index_config.line_config
        self.assertEqual(MappingIndexConverter.DEFAULT_TOKEN_LIST, line_config.token_list)
        self.assertTrue(line_config.chn)

    def test_to_index_config_with_none(self):
        """A missing (None) mapping yields no index config at all."""
        index_config = MappingIndexConverter.to_index_config(None)
        self.assertEqual(None, index_config)


if __name__ == '__main__':
    unittest.main()
|
MappingIndexConverter
|
<|file_name|>EDPartialAdmissionForDischargeDetailOutcomeVoAssembler.java<|end_file_name|><|fim▁begin|>//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
/*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5589.25814)
* WARNING: DO NOT MODIFY the content of this file
* Generated on 12/10/2015, 13:25
*
*/
package ims.emergency.vo.domain;
import ims.vo.domain.DomainObjectMap;
import java.util.HashMap;
import org.hibernate.proxy.HibernateProxy;
/**
* @author Florin Blindu
*/
public class EDPartialAdmissionForDischargeDetailOutcomeVoAssembler
{
/**
* Copy one ValueObject to another
* @param valueObjectDest to be updated
* @param valueObjectSrc to copy values from
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo copy(ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObjectDest, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObjectSrc)
{
if (null == valueObjectSrc)
{
return valueObjectSrc;
}
valueObjectDest.setID_EDPartialAdmission(valueObjectSrc.getID_EDPartialAdmission());
valueObjectDest.setIsRIE(valueObjectSrc.getIsRIE());
// DecisionToAdmitDateTime
valueObjectDest.setDecisionToAdmitDateTime(valueObjectSrc.getDecisionToAdmitDateTime());
// Specialty
valueObjectDest.setSpecialty(valueObjectSrc.getSpecialty());
// AllocatedStatus
valueObjectDest.setAllocatedStatus(valueObjectSrc.getAllocatedStatus());
// AllocatedBedType
valueObjectDest.setAllocatedBedType(valueObjectSrc.getAllocatedBedType());
// AuthoringInfo
valueObjectDest.setAuthoringInfo(valueObjectSrc.getAuthoringInfo());
// AllocatedDateTime
valueObjectDest.setAllocatedDateTime(valueObjectSrc.getAllocatedDateTime());
// AdmittingConsultant
valueObjectDest.setAdmittingConsultant(valueObjectSrc.getAdmittingConsultant());
// AccomodationRequestedType
valueObjectDest.setAccomodationRequestedType(valueObjectSrc.getAccomodationRequestedType());
return valueObjectDest;
}
/**
* Create the ValueObject collection to hold the set of DomainObjects.
* This is a convenience method only.
* It is intended to be used when one called to an Assembler is made.
* If more than one call to an Assembler is made then #createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(DomainObjectMap, Set) should be used.
* @param domainObjectSet - Set of ims.emergency.domain.objects.EDPartialAdmission objects.
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(java.util.Set domainObjectSet)
{
return createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(new DomainObjectMap(), domainObjectSet);
}
/**
* Create the ValueObject collection to hold the set of DomainObjects.
* @param map - maps DomainObjects to created ValueObjects
* @param domainObjectSet - Set of ims.emergency.domain.objects.EDPartialAdmission objects.
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(DomainObjectMap map, java.util.Set domainObjectSet)
{
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voList = new ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection();
if ( null == domainObjectSet )
{
return voList;
}
int rieCount=0;
int activeCount=0;
java.util.Iterator iterator = domainObjectSet.iterator();
while( iterator.hasNext() )
{
ims.emergency.domain.objects.EDPartialAdmission domainObject = (ims.emergency.domain.objects.EDPartialAdmission) iterator.next();
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo vo = create(map, domainObject);
if (vo != null)
voList.add(vo);
if (domainObject != null)
{
if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true)
rieCount++;
else
activeCount++;
}
}
voList.setRieCount(rieCount);
voList.setActiveCount(activeCount);
return voList;
}
/**
* Create the ValueObject collection to hold the list of DomainObjects.
* @param domainObjectList - List of ims.emergency.domain.objects.EDPartialAdmission objects.
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(java.util.List domainObjectList)
{
return createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(new DomainObjectMap(), domainObjectList);
}
/**
* Create the ValueObject collection to hold the list of DomainObjects.
* @param map - maps DomainObjects to created ValueObjects
* @param domainObjectList - List of ims.emergency.domain.objects.EDPartialAdmission objects.
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection createEDPartialAdmissionForDischargeDetailOutcomeVoCollectionFromEDPartialAdmission(DomainObjectMap map, java.util.List domainObjectList)
{
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voList = new ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection();
if ( null == domainObjectList )
{
return voList;
}
int rieCount=0;
int activeCount=0;
for (int i = 0; i < domainObjectList.size(); i++)
{
ims.emergency.domain.objects.EDPartialAdmission domainObject = (ims.emergency.domain.objects.EDPartialAdmission) domainObjectList.get(i);
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo vo = create(map, domainObject);
if (vo != null)
voList.add(vo);
if (domainObject != null)
{
if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true)
rieCount++;
else
activeCount++;
}
}
voList.setRieCount(rieCount);
voList.setActiveCount(activeCount);
return voList;
}
/**
* Create the ims.emergency.domain.objects.EDPartialAdmission set from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
public static java.util.Set extractEDPartialAdmissionSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voCollection)
{
return extractEDPartialAdmissionSet(domainFactory, voCollection, null, new HashMap());
}
public static java.util.Set extractEDPartialAdmissionSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voCollection, java.util.Set domainObjectSet, HashMap domMap)
{
int size = (null == voCollection) ? 0 : voCollection.size();
if (domainObjectSet == null)
{
domainObjectSet = new java.util.HashSet();
}
java.util.Set newSet = new java.util.HashSet();
for(int i=0; i<size; i++)
{
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo vo = voCollection.get(i);
ims.emergency.domain.objects.EDPartialAdmission domainObject = EDPartialAdmissionForDischargeDetailOutcomeVoAssembler.extractEDPartialAdmission(domainFactory, vo, domMap);
//TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
if (domainObject == null)
{
continue;
}
//Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
if (!domainObjectSet.contains(domainObject)) domainObjectSet.add(domainObject);
newSet.add(domainObject);
}
java.util.Set removedSet = new java.util.HashSet();
java.util.Iterator iter = domainObjectSet.iterator();
//Find out which objects need to be removed
while (iter.hasNext())
{
ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
{
removedSet.add(o);
}
}
iter = removedSet.iterator();
//Remove the unwanted objects
while (iter.hasNext())
{
domainObjectSet.remove(iter.next());
}
return domainObjectSet;
}
/**
* Create the ims.emergency.domain.objects.EDPartialAdmission list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
public static java.util.List extractEDPartialAdmissionList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voCollection)
{
return extractEDPartialAdmissionList(domainFactory, voCollection, null, new HashMap());
}
public static java.util.List extractEDPartialAdmissionList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVoCollection voCollection, java.util.List domainObjectList, HashMap domMap)
{
int size = (null == voCollection) ? 0 : voCollection.size();
if (domainObjectList == null)
{
domainObjectList = new java.util.ArrayList();
}
for(int i=0; i<size; i++)
{
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo vo = voCollection.get(i);
ims.emergency.domain.objects.EDPartialAdmission domainObject = EDPartialAdmissionForDischargeDetailOutcomeVoAssembler.extractEDPartialAdmission(domainFactory, vo, domMap);
//TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
if (domainObject == null)
{
continue;
}
int domIdx = domainObjectList.indexOf(domainObject);
if (domIdx == -1)
{
domainObjectList.add(i, domainObject);
}
else if (i != domIdx && i < domainObjectList.size())
{
Object tmp = domainObjectList.get(i);
domainObjectList.set(i, domainObjectList.get(domIdx));
domainObjectList.set(domIdx, tmp);
}
}
//Remove all ones in domList where index > voCollection.size() as these should
//now represent the ones removed from the VO collection. No longer referenced.
int i1=domainObjectList.size();
while (i1 > size)
{
domainObjectList.remove(i1-1);
i1=domainObjectList.size();
}
return domainObjectList;
}
/**
* Create the ValueObject from the ims.emergency.domain.objects.EDPartialAdmission object.
* @param domainObject ims.emergency.domain.objects.EDPartialAdmission
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo create(ims.emergency.domain.objects.EDPartialAdmission domainObject)
{
if (null == domainObject)
{
return null;
}
DomainObjectMap map = new DomainObjectMap();
return create(map, domainObject);
}
/**
* Create the ValueObject from the ims.emergency.domain.objects.EDPartialAdmission object.
* @param map DomainObjectMap of DomainObjects to already created ValueObjects.
* @param domainObject
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo create(DomainObjectMap map, ims.emergency.domain.objects.EDPartialAdmission domainObject)
{
if (null == domainObject)
{
return null;
}
// check if the domainObject already has a valueObject created for it
ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObject = (ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo) map.getValueObject(domainObject, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo.class);
if ( null == valueObject )
{
valueObject = new ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo(domainObject.getId(), domainObject.getVersion());
map.addValueObject(domainObject, valueObject);
valueObject = insert(map, valueObject, domainObject);
}
return valueObject;
}
/**
* Update the ValueObject with the Domain Object.
* @param valueObject to be updated
* @param domainObject ims.emergency.domain.objects.EDPartialAdmission
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo insert(ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObject, ims.emergency.domain.objects.EDPartialAdmission domainObject)
{
if (null == domainObject)
{
return valueObject;
}
DomainObjectMap map = new DomainObjectMap();
return insert(map, valueObject, domainObject);
}
/**
* Update the ValueObject with the Domain Object.
* @param map DomainObjectMap of DomainObjects to already created ValueObjects.
* @param valueObject to be updated
* @param domainObject ims.emergency.domain.objects.EDPartialAdmission
*/
public static ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo insert(DomainObjectMap map, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObject, ims.emergency.domain.objects.EDPartialAdmission domainObject)
{
if (null == domainObject)
{
return valueObject;
}
if (null == map)
{
map = new DomainObjectMap();
}
valueObject.setID_EDPartialAdmission(domainObject.getId());
valueObject.setIsRIE(domainObject.getIsRIE());
// If this is a recordedInError record, and the domainObject
// value isIncludeRecord has not been set, then we return null and
// not the value object
if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord())
return null;
// If this is not a recordedInError record, and the domainObject
// value isIncludeRecord has been set, then we return null and
// not the value object
if ((valueObject.getIsRIE() == null || valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord())
return null;
// DecisionToAdmitDateTime
java.util.Date DecisionToAdmitDateTime = domainObject.getDecisionToAdmitDateTime();
if ( null != DecisionToAdmitDateTime )
{
valueObject.setDecisionToAdmitDateTime(new ims.framework.utils.DateTime(DecisionToAdmitDateTime) );
}
// Specialty
ims.domain.lookups.LookupInstance instance2 = domainObject.getSpecialty();
if ( null != instance2 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance2.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance2.getImage().getImageId(), instance2.getImage().getImagePath());
}
color = instance2.getColor();
if (color != null)
color.getValue();
ims.core.vo.lookups.Specialty voLookup2 = new ims.core.vo.lookups.Specialty(instance2.getId(),instance2.getText(), instance2.isActive(), null, img, color);
ims.core.vo.lookups.Specialty parentVoLookup2 = voLookup2;
ims.domain.lookups.LookupInstance parent2 = instance2.getParent();
while (parent2 != null)
{
if (parent2.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent2.getImage().getImageId(), parent2.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent2.getColor();
if (color != null)
color.getValue();
parentVoLookup2.setParent(new ims.core.vo.lookups.Specialty(parent2.getId(),parent2.getText(), parent2.isActive(), null, img, color));
parentVoLookup2 = parentVoLookup2.getParent();
parent2 = parent2.getParent();
}
valueObject.setSpecialty(voLookup2);
}
// AllocatedStatus
ims.domain.lookups.LookupInstance instance3 = domainObject.getAllocatedStatus();
if ( null != instance3 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance3.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance3.getImage().getImageId(), instance3.getImage().getImagePath());
}
color = instance3.getColor();
if (color != null)
color.getValue();
ims.emergency.vo.lookups.AllocationStatus voLookup3 = new ims.emergency.vo.lookups.AllocationStatus(instance3.getId(),instance3.getText(), instance3.isActive(), null, img, color);
ims.emergency.vo.lookups.AllocationStatus parentVoLookup3 = voLookup3;
ims.domain.lookups.LookupInstance parent3 = instance3.getParent();
while (parent3 != null)
{
if (parent3.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent3.getImage().getImageId(), parent3.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent3.getColor();
if (color != null)
color.getValue();
parentVoLookup3.setParent(new ims.emergency.vo.lookups.AllocationStatus(parent3.getId(),parent3.getText(), parent3.isActive(), null, img, color));
parentVoLookup3 = parentVoLookup3.getParent();
parent3 = parent3.getParent();
}
valueObject.setAllocatedStatus(voLookup3);
<|fim▁hole|> }
// AllocatedBedType
ims.domain.lookups.LookupInstance instance4 = domainObject.getAllocatedBedType();
if ( null != instance4 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance4.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance4.getImage().getImageId(), instance4.getImage().getImagePath());
}
color = instance4.getColor();
if (color != null)
color.getValue();
ims.emergency.vo.lookups.AllocatedBedType voLookup4 = new ims.emergency.vo.lookups.AllocatedBedType(instance4.getId(),instance4.getText(), instance4.isActive(), null, img, color);
ims.emergency.vo.lookups.AllocatedBedType parentVoLookup4 = voLookup4;
ims.domain.lookups.LookupInstance parent4 = instance4.getParent();
while (parent4 != null)
{
if (parent4.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent4.getImage().getImageId(), parent4.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent4.getColor();
if (color != null)
color.getValue();
parentVoLookup4.setParent(new ims.emergency.vo.lookups.AllocatedBedType(parent4.getId(),parent4.getText(), parent4.isActive(), null, img, color));
parentVoLookup4 = parentVoLookup4.getParent();
parent4 = parent4.getParent();
}
valueObject.setAllocatedBedType(voLookup4);
}
// AuthoringInfo
valueObject.setAuthoringInfo(ims.core.vo.domain.AuthoringInformationVoAssembler.create(map, domainObject.getAuthoringInfo()) );
// AllocatedDateTime
java.util.Date AllocatedDateTime = domainObject.getAllocatedDateTime();
if ( null != AllocatedDateTime )
{
valueObject.setAllocatedDateTime(new ims.framework.utils.DateTime(AllocatedDateTime) );
}
// AdmittingConsultant
valueObject.setAdmittingConsultant(ims.core.vo.domain.HcpMinVoAssembler.create(map, domainObject.getAdmittingConsultant()) );
// AccomodationRequestedType
ims.domain.lookups.LookupInstance instance8 = domainObject.getAccomodationRequestedType();
if ( null != instance8 ) {
ims.framework.utils.ImagePath img = null;
ims.framework.utils.Color color = null;
img = null;
if (instance8.getImage() != null)
{
img = new ims.framework.utils.ImagePath(instance8.getImage().getImageId(), instance8.getImage().getImagePath());
}
color = instance8.getColor();
if (color != null)
color.getValue();
ims.core.vo.lookups.AccomodationRequestedType voLookup8 = new ims.core.vo.lookups.AccomodationRequestedType(instance8.getId(),instance8.getText(), instance8.isActive(), null, img, color);
ims.core.vo.lookups.AccomodationRequestedType parentVoLookup8 = voLookup8;
ims.domain.lookups.LookupInstance parent8 = instance8.getParent();
while (parent8 != null)
{
if (parent8.getImage() != null)
{
img = new ims.framework.utils.ImagePath(parent8.getImage().getImageId(), parent8.getImage().getImagePath() );
}
else
{
img = null;
}
color = parent8.getColor();
if (color != null)
color.getValue();
parentVoLookup8.setParent(new ims.core.vo.lookups.AccomodationRequestedType(parent8.getId(),parent8.getText(), parent8.isActive(), null, img, color));
parentVoLookup8 = parentVoLookup8.getParent();
parent8 = parent8.getParent();
}
valueObject.setAccomodationRequestedType(voLookup8);
}
return valueObject;
}
/**
* Create the domain object from the value object.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param valueObject - extract the domain object fields from this.
*/
public static ims.emergency.domain.objects.EDPartialAdmission extractEDPartialAdmission(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObject)
{
return extractEDPartialAdmission(domainFactory, valueObject, new HashMap());
}
public static ims.emergency.domain.objects.EDPartialAdmission extractEDPartialAdmission(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo valueObject, HashMap domMap)
{
if (null == valueObject)
{
return null;
}
Integer id = valueObject.getID_EDPartialAdmission();
ims.emergency.domain.objects.EDPartialAdmission domainObject = null;
if ( null == id)
{
if (domMap.get(valueObject) != null)
{
return (ims.emergency.domain.objects.EDPartialAdmission)domMap.get(valueObject);
}
// ims.emergency.vo.EDPartialAdmissionForDischargeDetailOutcomeVo ID_EDPartialAdmission field is unknown
domainObject = new ims.emergency.domain.objects.EDPartialAdmission();
domMap.put(valueObject, domainObject);
}
else
{
String key = (valueObject.getClass().getName() + "__" + valueObject.getID_EDPartialAdmission());
if (domMap.get(key) != null)
{
return (ims.emergency.domain.objects.EDPartialAdmission)domMap.get(key);
}
domainObject = (ims.emergency.domain.objects.EDPartialAdmission) domainFactory.getDomainObject(ims.emergency.domain.objects.EDPartialAdmission.class, id );
//TODO: Not sure how this should be handled. Effectively it must be a staleobject exception, but maybe should be handled as that further up.
if (domainObject == null)
return null;
domMap.put(key, domainObject);
}
domainObject.setVersion(valueObject.getVersion_EDPartialAdmission());
ims.framework.utils.DateTime dateTime1 = valueObject.getDecisionToAdmitDateTime();
java.util.Date value1 = null;
if ( dateTime1 != null )
{
value1 = dateTime1.getJavaDate();
}
domainObject.setDecisionToAdmitDateTime(value1);
// create LookupInstance from vo LookupType
ims.domain.lookups.LookupInstance value2 = null;
if ( null != valueObject.getSpecialty() )
{
value2 =
domainFactory.getLookupInstance(valueObject.getSpecialty().getID());
}
domainObject.setSpecialty(value2);
// create LookupInstance from vo LookupType
ims.domain.lookups.LookupInstance value3 = null;
if ( null != valueObject.getAllocatedStatus() )
{
value3 =
domainFactory.getLookupInstance(valueObject.getAllocatedStatus().getID());
}
domainObject.setAllocatedStatus(value3);
// create LookupInstance from vo LookupType
ims.domain.lookups.LookupInstance value4 = null;
if ( null != valueObject.getAllocatedBedType() )
{
value4 =
domainFactory.getLookupInstance(valueObject.getAllocatedBedType().getID());
}
domainObject.setAllocatedBedType(value4);
// SaveAsRefVO - treated as a refVo in extract methods
ims.core.clinical.domain.objects.AuthoringInformation value5 = null;
if ( null != valueObject.getAuthoringInfo() )
{
if (valueObject.getAuthoringInfo().getBoId() == null)
{
if (domMap.get(valueObject.getAuthoringInfo()) != null)
{
value5 = (ims.core.clinical.domain.objects.AuthoringInformation)domMap.get(valueObject.getAuthoringInfo());
}
}
else
{
value5 = (ims.core.clinical.domain.objects.AuthoringInformation)domainFactory.getDomainObject(ims.core.clinical.domain.objects.AuthoringInformation.class, valueObject.getAuthoringInfo().getBoId());
}
}
domainObject.setAuthoringInfo(value5);
ims.framework.utils.DateTime dateTime6 = valueObject.getAllocatedDateTime();
java.util.Date value6 = null;
if ( dateTime6 != null )
{
value6 = dateTime6.getJavaDate();
}
domainObject.setAllocatedDateTime(value6);
// SaveAsRefVO - treated as a refVo in extract methods
ims.core.resource.people.domain.objects.Hcp value7 = null;
if ( null != valueObject.getAdmittingConsultant() )
{
if (valueObject.getAdmittingConsultant().getBoId() == null)
{
if (domMap.get(valueObject.getAdmittingConsultant()) != null)
{
value7 = (ims.core.resource.people.domain.objects.Hcp)domMap.get(valueObject.getAdmittingConsultant());
}
}
else
{
value7 = (ims.core.resource.people.domain.objects.Hcp)domainFactory.getDomainObject(ims.core.resource.people.domain.objects.Hcp.class, valueObject.getAdmittingConsultant().getBoId());
}
}
domainObject.setAdmittingConsultant(value7);
// create LookupInstance from vo LookupType
ims.domain.lookups.LookupInstance value8 = null;
if ( null != valueObject.getAccomodationRequestedType() )
{
value8 =
domainFactory.getLookupInstance(valueObject.getAccomodationRequestedType().getID());
}
domainObject.setAccomodationRequestedType(value8);
return domainObject;
}
}<|fim▁end|>
| |
<|file_name|>malware-indicator-for-file-hash_consumer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2014, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import sys
from stix.core import STIXPackage
def parse_stix(pkg):
print("== MALWARE ==")
for fam in pkg.ttps:
print("---")
print("Title : " + fam.title)
print("ID : " + fam.id_)
for sample in fam.behavior.malware_instances:
print("Sample: " + str(sample.names[0]))
print("Type: " + str(sample.types[0]))
for ind in pkg.indicators:
print("---")
print("Title : " + ind.title)
print("Type : " + str(ind.indicator_types[0]))
print("ID -> : " + ind.indicated_ttps[0].item.idref)
for obs in ind.observables:
for digest in obs.object_.properties.hashes:
print("Hash : " + str(digest))
return 0
if __name__ == '__main__':
try:
fname = sys.argv[1]
except:<|fim▁hole|> parse_stix(stix_pkg)<|fim▁end|>
|
exit(1)
fd = open(fname)
stix_pkg = STIXPackage.from_xml(fd)
|
<|file_name|>tool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
##
# Copyright (c) 2007 Apple Inc.
#
# This is the MIT license. This software may also be distributed under the
# same terms as Python (the PSF license).
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
##
import sys
import os
import getopt
import xattr
import zlib
def usage(e=None):
if e:
print(e)
print("")
name = os.path.basename(sys.argv[0])
print("usage: %s [-lz] file [file ...]" % (name,))
print(" %s -p [-lz] attr_name file [file ...]" % (name,))
print(" %s -w [-z] attr_name attr_value file [file ...]" % (name,))
print(" %s -d attr_name file [file ...]" % (name,))
print("")
print("The first form lists the names of all xattrs on the given file(s).")
print("The second form (-p) prints the value of the xattr attr_name.")
print("The third form (-w) sets the value of the xattr attr_name to attr_value.")
print("The fourth form (-d) deletes the xattr attr_name.")
print("")
print("options:")
print(" -h: print this help")
print(" -l: print long format (attr_name: attr_value)")
print(" -z: compress or decompress (if compressed) attribute value in zip format")
if e:
sys.exit(64)
else:
sys.exit(0)
class NullsInString(Exception):
"""Nulls in string."""
_FILTER=''.join([(len(repr(chr(x)))==3) and chr(x) or '.' for x in range(256)])
def _dump(src, length=16):
result=[]
for i in range(0, len(src), length):
s = src[i:i+length]
hexa = ' '.join(["%02X"%ord(x) for x in s])
printable = s.translate(_FILTER)
result.append("%04X %-*s %s\n" % (i, length*3, hexa, printable))
return ''.join(result)
def main():
try:
(optargs, args) = getopt.getopt(sys.argv[1:], "hlpwdz", ["help"])
except getopt.GetoptError as e:
usage(e)
attr_name = None
long_format = False
read = False
write = False
delete = False
compress = lambda x: x
decompress = compress
status = 0
for opt, arg in optargs:
if opt in ("-h", "--help"):
usage()
elif opt == "-l":
long_format = True
elif opt == "-p":
read = True
if write or delete:
usage("-p not allowed with -w or -d")
elif opt == "-w":
write = True
if read or delete:
usage("-w not allowed with -p or -d")
elif opt == "-d":
delete = True
if read or write:
usage("-d not allowed with -p or -w")
elif opt == "-z":
compress = zlib.compress
decompress = zlib.decompress
if write or delete:
if long_format:
usage("-l not allowed with -w or -p")
if read or write or delete:
if not args:
usage("No attr_name")
attr_name = args.pop(0)
if write:
if not args:
usage("No attr_value")
attr_value = args.pop(0)
if len(args) > 1:
multiple_files = True
else:
multiple_files = False
for filename in args:
def onError(e):
if not os.path.exists(filename):
sys.stderr.write("No such file: %s\n" % (filename,))
else:
sys.stderr.write(str(e) + "\n")
status = 1
try:
attrs = xattr.xattr(filename)
except (IOError, OSError) as e:
onError(e)
continue
<|fim▁hole|> except (IOError, OSError) as e:
onError(e)
continue
elif delete:
try:
del attrs[attr_name]
except (IOError, OSError) as e:
onError(e)
continue
except KeyError:
onError("No such xattr: %s" % (attr_name,))
continue
else:
try:
if read:
attr_names = (attr_name,)
else:
attr_names = list(attrs.keys())
except (IOError, OSError) as e:
onError(e)
continue
if multiple_files:
file_prefix = "%s: " % (filename,)
else:
file_prefix = ""
for attr_name in attr_names:
try:
try:
attr_value = decompress(attrs[attr_name])
except zlib.error:
attr_value = attrs[attr_name]
except KeyError:
onError("%sNo such xattr: %s" % (file_prefix, attr_name))
continue
if long_format:
try:
if attr_value.find('\0') >= 0:
raise NullsInString;
print("".join((file_prefix, "%s: " % (attr_name,), attr_value)))
except (UnicodeDecodeError, NullsInString):
print("".join((file_prefix, "%s:" % (attr_name,))))
print(_dump(attr_value))
else:
if read:
print("".join((file_prefix, attr_value)))
else:
print("".join((file_prefix, attr_name)))
sys.exit(status)
if __name__ == "__main__":
main()<|fim▁end|>
|
if write:
try:
attrs[attr_name] = compress(attr_value)
|
<|file_name|>fault.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import six
import webob.dec
import webob.exc
from manila.api.openstack import wsgi
from manila.i18n import _
from manila import utils
from manila.wsgi import common as base_wsgi
LOG = log.getLogger(__name__)
class FaultWrapper(base_wsgi.Middleware):
"""Calls down the middleware stack, making exceptions into faults."""<|fim▁hole|>
@staticmethod
def status_to_type(status):
if not FaultWrapper._status_to_type:
for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError):
FaultWrapper._status_to_type[clazz.code] = clazz
return FaultWrapper._status_to_type.get(
status, webob.exc.HTTPInternalServerError)()
def _error(self, inner, req):
if isinstance(inner, UnicodeDecodeError):
msg = _("Error decoding your request. Either the URL or the "
"request body contained characters that could not be "
"decoded by Manila.")
return wsgi.Fault(webob.exc.HTTPBadRequest(explanation=msg))
LOG.exception("Caught error: %s", inner)
safe = getattr(inner, 'safe', False)
headers = getattr(inner, 'headers', None)
status = getattr(inner, 'code', 500)
if status is None:
status = 500
msg_dict = dict(url=req.url, status=status)
LOG.info("%(url)s returned with HTTP %(status)d", msg_dict)
outer = self.status_to_type(status)
if headers:
outer.headers = headers
# NOTE(johannes): We leave the explanation empty here on
# purpose. It could possibly have sensitive information
# that should not be returned back to the user. See
# bugs 868360 and 874472
# NOTE(eglynn): However, it would be over-conservative and
# inconsistent with the EC2 API to hide every exception,
# including those that are safe to expose, see bug 1021373
if safe:
outer.explanation = '%s: %s' % (inner.__class__.__name__,
six.text_type(inner))
return wsgi.Fault(outer)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
try:
return req.get_response(self.application)
except Exception as ex:
return self._error(ex, req)<|fim▁end|>
|
_status_to_type = {}
|
<|file_name|>recsys.py<|end_file_name|><|fim▁begin|>from math import exp
from collections import defaultdict
@outputSchema("scaled: double")
def logistic_scale(val, logistic_param):
return -1.0 + 2.0 / (1.0 + exp(-logistic_param * val))
@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double)")<|fim▁hole|>def best_path(paths):
return sorted(paths, key=lambda t:t[2])[0]
@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double, link_data: map[], linking_item: chararray)")
def best_path_detailed(paths):
return sorted(paths, key=lambda t:t[2])[0]
@outputSchema("signal_map:map[]")
def aggregate_signal_types(signal_list):
signal_dict = {}
for row in signal_list:
if row[3]:
if not signal_dict.get(row[3]):
signal_dict[row[3]] = 0
signal_dict[row[3]] += 1
return signal_dict
@outputSchema("signal_map:map[]")
def combine_signals(signal_list):
signal_dict = {}
for row in signal_list:
if row[3]:
for val in row[3].keys():
if not signal_dict.get(row[3]):
signal_dict[row[3]] = 0
signal_dict[val] += row[3][val]
return signal_dict<|fim▁end|>
| |
<|file_name|>bencode_roundtrip.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use synapse_bencode as bencode;
fuzz_target!(|fuzz_data: &[u8]| {
if let Ok(initial_bencode) = bencode::decode_buf(fuzz_data) {
let mut buf = Vec::<u8>::new();
initial_bencode.encode(&mut buf).unwrap();
let roundtripped_bencode = bencode::decode_buf(&buf).unwrap();
assert_eq!(initial_bencode, roundtripped_bencode);
};
});<|fim▁end|>
|
#![no_main]
use libfuzzer_sys::fuzz_target;
|
<|file_name|>base_component.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
class BaseComponent:
"""This is a basic interface for defining components.
The only requirement is to implement the name method that
uniquely identifies a component. It should also define other
methods that implement the component functionality.<|fim▁hole|> @classmethod
def name(cls):
raise NotImplementedError()<|fim▁end|>
|
"""
|
<|file_name|>ProjectCalcDaoImpl.java<|end_file_name|><|fim▁begin|>package com.management.dao.impl.calcs;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.BeanListHandler;
import com.management.bean.calculate.ProjectCalc;
import com.management.bean.calculate.ProjectCalcs;
import com.management.dao.calcs.ProjectCalcDao;
import com.management.util.DBUtil;
public class ProjectCalcDaoImpl implements ProjectCalcDao {
@Override
public List<ProjectCalc> find() {
try {
Connection conn = DBUtil.getConnection();
String sql = "select * from project_calc";
QueryRunner qr = new QueryRunner();
return qr.query(conn, sql, new BeanListHandler<>(ProjectCalc.class));
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public void add(ProjectCalc c) {
try {
Connection conn = DBUtil.getConnection();
String sql = "insert into project_calc(rank,category,weight,high,low) values(?,?,?,?,?)";
QueryRunner qr = new QueryRunner();
Object[] params = {c.getRank(),c.getCategory(),c.getWeight(),c.getHigh(),c.getLow()};
qr.update(conn, sql, params);
ProjectCalcs.update();
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public void update(ProjectCalc c) {
try {
Connection conn = DBUtil.getConnection();
String sql = "update project_calc set rank=?,category=?,weight=?,high=?,low=? where id=?";
QueryRunner qr = new QueryRunner();
Object[] params = {c.getRank(),c.getCategory(),c.getWeight(),c.getHigh(),c.getLow(),c.getId()};
qr.update(conn, sql, params);
ProjectCalcs.update();
} catch (SQLException e) {
throw new RuntimeException(e);<|fim▁hole|> }
@Override
public void delete(int id) {
try {
Connection conn = DBUtil.getConnection();
String sql = "delete from project_calc where id=?";
QueryRunner qr = new QueryRunner();
qr.update(conn, sql,id);
ProjectCalcs.update();
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}<|fim▁end|>
|
}
|
<|file_name|>image.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! CSS handling for the computed value of
//! [`image`][image]s
//!
//! [image]: https://drafts.csswg.org/css-images/#image-values
use crate::values::computed::position::Position;
use crate::values::computed::url::ComputedImageUrl;
#[cfg(feature = "gecko")]
use crate::values::computed::NumberOrPercentage;
use crate::values::computed::{Angle, Color, Context};
use crate::values::computed::{
LengthPercentage, NonNegativeLength, NonNegativeLengthPercentage, ToComputedValue,
};
use crate::values::generics::image::{self as generic, GradientCompatMode};
use crate::values::specified::image::LineDirection as SpecifiedLineDirection;
use crate::values::specified::position::{HorizontalPositionKeyword, VerticalPositionKeyword};
use std::f32::consts::PI;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
/// A computed image layer.
pub type ImageLayer = generic::GenericImageLayer<Image>;
/// Computed values for an image according to CSS-IMAGES.
/// <https://drafts.csswg.org/css-images/#image-values>
pub type Image = generic::GenericImage<Gradient, MozImageRect, ComputedImageUrl>;
/// Computed values for a CSS gradient.
/// <https://drafts.csswg.org/css-images/#gradients>
pub type Gradient = generic::GenericGradient<
LineDirection,
LengthPercentage,
NonNegativeLength,
NonNegativeLengthPercentage,
Position,
Color,
>;
/// A computed radial gradient ending shape.
pub type EndingShape = generic::GenericEndingShape<NonNegativeLength, NonNegativeLengthPercentage>;
/// A computed gradient line direction.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToResolvedValue)]
#[repr(C, u8)]
pub enum LineDirection {<|fim▁hole|> Angle(Angle),
/// A horizontal direction.
Horizontal(HorizontalPositionKeyword),
/// A vertical direction.
Vertical(VerticalPositionKeyword),
/// A corner.
Corner(HorizontalPositionKeyword, VerticalPositionKeyword),
}
/// A computed gradient item.
pub type GradientItem = generic::GenericGradientItem<Color, LengthPercentage>;
/// A computed color stop.
pub type ColorStop = generic::ColorStop<Color, LengthPercentage>;
/// Computed values for `-moz-image-rect(...)`.
#[cfg(feature = "gecko")]
pub type MozImageRect = generic::MozImageRect<NumberOrPercentage, ComputedImageUrl>;
/// Empty enum on non-gecko
#[cfg(not(feature = "gecko"))]
pub type MozImageRect = crate::values::specified::image::MozImageRect;
impl generic::LineDirection for LineDirection {
fn points_downwards(&self, compat_mode: GradientCompatMode) -> bool {
match *self {
LineDirection::Angle(angle) => angle.radians() == PI,
LineDirection::Vertical(VerticalPositionKeyword::Bottom) => {
compat_mode == GradientCompatMode::Modern
},
LineDirection::Vertical(VerticalPositionKeyword::Top) => {
compat_mode != GradientCompatMode::Modern
},
_ => false,
}
}
fn to_css<W>(&self, dest: &mut CssWriter<W>, compat_mode: GradientCompatMode) -> fmt::Result
where
W: Write,
{
match *self {
LineDirection::Angle(ref angle) => angle.to_css(dest),
LineDirection::Horizontal(x) => {
if compat_mode == GradientCompatMode::Modern {
dest.write_str("to ")?;
}
x.to_css(dest)
},
LineDirection::Vertical(y) => {
if compat_mode == GradientCompatMode::Modern {
dest.write_str("to ")?;
}
y.to_css(dest)
},
LineDirection::Corner(x, y) => {
if compat_mode == GradientCompatMode::Modern {
dest.write_str("to ")?;
}
x.to_css(dest)?;
dest.write_str(" ")?;
y.to_css(dest)
},
}
}
}
impl ToComputedValue for SpecifiedLineDirection {
type ComputedValue = LineDirection;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
SpecifiedLineDirection::Angle(ref angle) => {
LineDirection::Angle(angle.to_computed_value(context))
},
SpecifiedLineDirection::Horizontal(x) => LineDirection::Horizontal(x),
SpecifiedLineDirection::Vertical(y) => LineDirection::Vertical(y),
SpecifiedLineDirection::Corner(x, y) => LineDirection::Corner(x, y),
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
match *computed {
LineDirection::Angle(ref angle) => {
SpecifiedLineDirection::Angle(ToComputedValue::from_computed_value(angle))
},
LineDirection::Horizontal(x) => SpecifiedLineDirection::Horizontal(x),
LineDirection::Vertical(y) => SpecifiedLineDirection::Vertical(y),
LineDirection::Corner(x, y) => SpecifiedLineDirection::Corner(x, y),
}
}
}<|fim▁end|>
|
/// An angle.
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use amiquip::{Connection, ConsumerMessage, ConsumerOptions, QueueDeclareOptions, Result};
fn main() -> Result<()> {
// Open connection.
let mut connection = Connection::insecure_open("amqp://guest:guest@mkstack_rabbitmq:5672")?;
// Open a channel - None says let the library choose the channel ID.
let channel = connection.open_channel(None)?;
// Declare the "hello" queue.
let queue = channel.queue_declare("hello", QueueDeclareOptions::default())?;
// Start a consumer.
let consumer = queue.consume(ConsumerOptions::default())?;
for (i, message) in consumer.receiver().iter().enumerate() {
match message {<|fim▁hole|> ConsumerMessage::Delivery(delivery) => {
let body = String::from_utf8_lossy(&delivery.body);
println!("({:>3}) Received [{}]", i, body);
consumer.ack(delivery)?;
}
other => {
println!("Consumer ended: {:?}", other);
break;
}
}
}
connection.close()
}<|fim▁end|>
| |
<|file_name|>pkg.installspace.context.pc.py<|end_file_name|><|fim▁begin|># generated from catkin/cmake/template/pkg.context.pc.in<|fim▁hole|>PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/jorge/tum_simulator_ws/install/include".split(';') if "/home/jorge/tum_simulator_ws/install/include" != "" else []
PROJECT_CATKIN_DEPENDS = "message_runtime".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "cvg_sim_msgs"
PROJECT_SPACE_DIR = "/home/jorge/tum_simulator_ws/install"
PROJECT_VERSION = "0.0.0"<|fim▁end|>
|
CATKIN_PACKAGE_PREFIX = ""
|
<|file_name|>pulsedescriptor.hpp<|end_file_name|><|fim▁begin|>/*
===============================================================================
FILE: pulsedescriptor.hpp
CONTENTS:
Describes the way that outgoing and returning waveform of a pulse is stored
in the PulseWaves. There can be multiple pulsedescriptors each describing a
different composition of samplings.
PROGRAMMERS:
[email protected] - http://rapidlasso.com
COPYRIGHT:
(c) 2007-2013, martin isenburg, rapidlasso - fast tools to catch reality
This is free software; you can redistribute and/or modify it under the
terms of the GNU Lesser General Licence as published by the Free Software
Foundation. See the COPYING.txt file for more information.
This software is distributed WITHOUT ANY WARRANTY and without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
CHANGE HISTORY:
02 March 2012 -- created before celebrating Silke's birthday at Hafen 2
===============================================================================
*/
#ifndef PULSE_DESCRIPTOR_HPP
#define PULSE_DESCRIPTOR_HPP
#include "mydefs.hpp"
#include "pulsewavesdefinitions.hpp"
class ByteStreamIn;
class ByteStreamOut;
class PULSEsampling
{
public:
// start of attributes
U32 size; // byte-aligned size from start to end of attributes (including the PULSEWAVES_DESCRIPTION_SIZE bytes for description)
U32 reserved; // must be zero
U8 type; // 0 - undefined, 1 - outgoing, 2 - returning
U8 channel;
U8 unused; // must be zero
U8 bits_for_duration_from_anchor; // 0, 8, 16, or 32
F32 scale_for_duration_from_anchor; // default is 1.0f
F32 offset_for_duration_from_anchor; // default is 0.0f
U8 bits_for_number_of_segments; // 0 or 8 or 16
U8 bits_for_number_of_samples; // 0 or 8 or 16
U16 number_of_segments;
U32 number_of_samples;
U16 bits_per_sample; // 8 or 16
U16 lookup_table_index; // index of 1 or higher to PULSEtable stored in VLR/AVLR, 0 means no lookup table.
F32 sample_units; // [nanoseconds]
U32 compression; // must be zero
// space for new attributes
// ...
// space for new attributes
CHAR description[PULSEWAVES_DESCRIPTION_SIZE];
// end of attributes
U32 size_of_attributes() const;
BOOL load(ByteStreamIn* stream);
BOOL save(ByteStreamOut* stream) const;
BOOL is_equal(const PULSEsampling* sampling) const;
PULSEsampling();
};
class PULSEcomposition
{
public:
// start of attributes
U32 size; // byte-aligned size from start to end of attributes (including the PULSEWAVES_DESCRIPTION_SIZE bytes for description)
U32 reserved; // must be zero
I32 optical_center_to_anchor_point; // a fixed offset between the two [sampling units]
U16 number_of_extra_waves_bytes; // must be zero
U16 number_of_samplings;
U32 scanner_index;
F32 sample_units; // [nanoseconds]
U32 compression; // must be zero
// space for new attributes
// ...
// space for new attributes
CHAR description[PULSEWAVES_DESCRIPTION_SIZE];
// end of attributes<|fim▁hole|> BOOL save(ByteStreamOut* stream) const;
BOOL is_equal(const PULSEcomposition* composition) const;
PULSEcomposition();
};
class PULSEdescriptor
{
public:
PULSEcomposition* composition;
PULSEsampling* samplings;
BOOL load(ByteStreamIn* stream);
BOOL save(ByteStreamOut* stream) const;
BOOL is_equal(const PULSEcomposition* composition, const PULSEsampling* sampling) const;
BOOL is_equal(const PULSEdescriptor* descriptor) const;
PULSEdescriptor();
};
#endif<|fim▁end|>
|
U32 size_of_attributes() const;
BOOL load(ByteStreamIn* stream);
|
<|file_name|>LeetCode0363.java<|end_file_name|><|fim▁begin|>public class LeetCode0363 {
public int maxSumSubmatrix(int[][] matrix, int k) {
int m = matrix.length, n = matrix[0].length, ans = Integer.MIN_VALUE;
long[] sum = new long[m + 1];
for (int i = 0; i < n; ++i) {
long[] sumInRow = new long[m];
for (int j = i; j < n; ++j) {
for (int p = 0; p < m; ++p) {
sumInRow[p] += matrix[p][j];
sum[p + 1] = sum[p] + sumInRow[p];
}
ans = Math.max(ans, mergeSort(sum, 0, m + 1, k));
if (ans == k)
return k;
}
}
return ans;
}
int mergeSort(long[] sum, int start, int end, int k) {
if (end == start + 1)
return Integer.MIN_VALUE;
int mid = start + (end - start) / 2, cnt = 0;
int ans = mergeSort(sum, start, mid, k);
if (ans == k)
return k;
ans = Math.max(ans, mergeSort(sum, mid, end, k));
if (ans == k)
return k;
long[] cache = new long[end - start];
for (int i = start, j = mid, p = mid; i < mid; ++i) {
while (j < end && sum[j] - sum[i] <= k){
++j;
}
if (j - 1 >= mid) {<|fim▁hole|> return k;
}
}
while (p < end && sum[p] < sum[i]){
cache[cnt++] = sum[p++];
}
cache[cnt++] = sum[i];
}
System.arraycopy(cache, 0, sum, start, cnt);
return ans;
}
}<|fim▁end|>
|
ans = Math.max(ans, (int) (sum[j - 1] - sum[i]));
if (ans == k){
|
<|file_name|>FLyrVect.java<|end_file_name|><|fim▁begin|>/* gvSIG. Sistema de Información Geográfica de la Generalitat Valenciana
*
* Copyright (C) 2004 IVER T.I. and Generalitat Valenciana.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,USA.
*
* For more information, contact:
*
* Generalitat Valenciana
* Conselleria d'Infraestructures i Transport
* Av. Blasco Ibáñez, 50
* 46010 VALENCIA
* SPAIN
*
* +34 963862235
* [email protected]
* www.gvsig.gva.es
*
* or
*
* IVER T.I. S.A
* Salamanca 50
* 46005 Valencia
* Spain
*
* +34 963163400
* [email protected]
*/
package com.iver.cit.gvsig.fmap.layers;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.net.URI;
import java.util.ArrayList;
import javax.print.attribute.PrintRequestAttributeSet;
import javax.print.attribute.standard.PrintQuality;
import org.apache.log4j.Logger;
import org.cresques.cts.ICoordTrans;
import org.gvsig.tools.file.PathGenerator;
import com.hardcode.gdbms.driver.exceptions.ReadDriverException;
import com.hardcode.gdbms.engine.data.DataSourceFactory;
import com.hardcode.gdbms.engine.data.NoSuchTableException;
import com.hardcode.gdbms.engine.data.driver.DriverException;
import com.hardcode.gdbms.engine.instruction.FieldNotFoundException;
import com.iver.cit.gvsig.exceptions.expansionfile.ExpansionFileReadException;
import com.iver.cit.gvsig.exceptions.layers.LegendLayerException;
import com.iver.cit.gvsig.exceptions.layers.ReloadLayerException;
import com.iver.cit.gvsig.exceptions.layers.ReprojectLayerException;
import com.iver.cit.gvsig.exceptions.layers.StartEditionLayerException;
import com.iver.cit.gvsig.exceptions.visitors.StartWriterVisitorException;
import com.iver.cit.gvsig.exceptions.visitors.VisitorException;
import com.iver.cit.gvsig.fmap.MapContext;
import com.iver.cit.gvsig.fmap.MapControl;
import com.iver.cit.gvsig.fmap.ViewPort;
import com.iver.cit.gvsig.fmap.core.CartographicSupport;
import com.iver.cit.gvsig.fmap.core.FPoint2D;
import com.iver.cit.gvsig.fmap.core.FShape;
import com.iver.cit.gvsig.fmap.core.IFeature;
import com.iver.cit.gvsig.fmap.core.IGeometry;
import com.iver.cit.gvsig.fmap.core.ILabelable;
import com.iver.cit.gvsig.fmap.core.IRow;
import com.iver.cit.gvsig.fmap.core.symbols.IMultiLayerSymbol;
import com.iver.cit.gvsig.fmap.core.symbols.ISymbol;
import com.iver.cit.gvsig.fmap.core.symbols.SimpleLineSymbol;
import com.iver.cit.gvsig.fmap.core.v02.FConverter;
import com.iver.cit.gvsig.fmap.core.v02.FSymbol;
import com.iver.cit.gvsig.fmap.drivers.BoundedShapes;
import com.iver.cit.gvsig.fmap.drivers.IFeatureIterator;
import com.iver.cit.gvsig.fmap.drivers.IVectorialDatabaseDriver;
import com.iver.cit.gvsig.fmap.drivers.VectorialDriver;
import com.iver.cit.gvsig.fmap.drivers.WithDefaultLegend;
import com.iver.cit.gvsig.fmap.drivers.featureiterators.JoinFeatureIterator;
import com.iver.cit.gvsig.fmap.edition.AfterFieldEditEvent;
import com.iver.cit.gvsig.fmap.edition.AfterRowEditEvent;
import com.iver.cit.gvsig.fmap.edition.AnnotationEditableAdapter;
import com.iver.cit.gvsig.fmap.edition.BeforeFieldEditEvent;
import com.iver.cit.gvsig.fmap.edition.BeforeRowEditEvent;
import com.iver.cit.gvsig.fmap.edition.EditionEvent;
import com.iver.cit.gvsig.fmap.edition.IEditionListener;
import com.iver.cit.gvsig.fmap.edition.ISpatialWriter;
import com.iver.cit.gvsig.fmap.edition.IWriteable;
import com.iver.cit.gvsig.fmap.edition.IWriter;
import com.iver.cit.gvsig.fmap.edition.VectorialEditableAdapter;
import com.iver.cit.gvsig.fmap.edition.VectorialEditableDBAdapter;
import com.iver.cit.gvsig.fmap.layers.layerOperations.AlphanumericData;
import com.iver.cit.gvsig.fmap.layers.layerOperations.ClassifiableVectorial;
import com.iver.cit.gvsig.fmap.layers.layerOperations.InfoByPoint;
import com.iver.cit.gvsig.fmap.layers.layerOperations.RandomVectorialData;
import com.iver.cit.gvsig.fmap.layers.layerOperations.SingleLayer;
import com.iver.cit.gvsig.fmap.layers.layerOperations.VectorialData;
import com.iver.cit.gvsig.fmap.layers.layerOperations.VectorialXMLItem;
import com.iver.cit.gvsig.fmap.layers.layerOperations.XMLItem;
import com.iver.cit.gvsig.fmap.operations.strategies.FeatureVisitor;
import com.iver.cit.gvsig.fmap.operations.strategies.Strategy;
import com.iver.cit.gvsig.fmap.operations.strategies.StrategyManager;
import com.iver.cit.gvsig.fmap.rendering.IClassifiedVectorLegend;
import com.iver.cit.gvsig.fmap.rendering.ILegend;
import com.iver.cit.gvsig.fmap.rendering.IVectorLegend;
import com.iver.cit.gvsig.fmap.rendering.LegendClearEvent;
import com.iver.cit.gvsig.fmap.rendering.LegendContentsChangedListener;
import com.iver.cit.gvsig.fmap.rendering.LegendFactory;
import com.iver.cit.gvsig.fmap.rendering.SingleSymbolLegend;
import com.iver.cit.gvsig.fmap.rendering.SymbolLegendEvent;
import com.iver.cit.gvsig.fmap.rendering.ZSort;
import com.iver.cit.gvsig.fmap.rendering.styling.labeling.AttrInTableLabelingStrategy;
import com.iver.cit.gvsig.fmap.rendering.styling.labeling.ILabelingStrategy;
import com.iver.cit.gvsig.fmap.rendering.styling.labeling.LabelingFactory;
import com.iver.cit.gvsig.fmap.spatialindex.IPersistentSpatialIndex;
import com.iver.cit.gvsig.fmap.spatialindex.ISpatialIndex;
import com.iver.cit.gvsig.fmap.spatialindex.QuadtreeGt2;
import com.iver.cit.gvsig.fmap.spatialindex.QuadtreeJts;
import com.iver.cit.gvsig.fmap.spatialindex.SpatialIndexException;
import com.iver.utiles.FileUtils;
import com.iver.utiles.IPersistence;
import com.iver.utiles.NotExistInXMLEntity;
import com.iver.utiles.PostProcessSupport;
import com.iver.utiles.XMLEntity;
import com.iver.utiles.swing.threads.Cancellable;
import com.iver.utiles.swing.threads.CancellableMonitorable;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.TopologyException;
/**
* Capa básica Vectorial.
*
* @author Fernando González Cortés
*/
public class FLyrVect extends FLyrDefault implements ILabelable,
ClassifiableVectorial, SingleLayer, VectorialData, RandomVectorialData,
AlphanumericData, InfoByPoint, SelectionListener, IEditionListener, LegendContentsChangedListener {
private static Logger logger = Logger.getLogger(FLyrVect.class.getName());
/** Leyenda de la capa vectorial */
private IVectorLegend legend;
private int typeShape = -1;
private ReadableVectorial source;
private SelectableDataSource sds;
private SpatialCache spatialCache = new SpatialCache();
private boolean spatialCacheEnabled = false;
/**
* An implementation of gvSIG spatial index
*/
protected ISpatialIndex spatialIndex = null;
private boolean bHasJoin = false;
private XMLEntity orgXMLEntity = null;
private XMLEntity loadSelection = null;
private IVectorLegend loadLegend = null;
//Lo añado. Características de HyperEnlace (LINK)
private FLyrVectLinkProperties linkProperties=new FLyrVectLinkProperties();
//private ArrayList linkProperties=null;
private boolean waitTodraw=false;
private static PathGenerator pathGenerator=PathGenerator.getInstance();
public boolean isWaitTodraw() {
return waitTodraw;
}
public void setWaitTodraw(boolean waitTodraw) {
this.waitTodraw = waitTodraw;
}
/**
 * Returns the VectorialAdapter (data source) of the layer.
 *
 * @return the ReadableVectorial backing this layer, or null when the layer
 *         is not available.
 */
public ReadableVectorial getSource() {
    return this.isAvailable() ? source : null;
}
/**
 * If we use a persistent spatial index associated with this layer, and the
 * index is not intrisic to the layer (for example spatial databases) this
 * method looks for existent spatial index, and loads it.
 *
 * The index file (main file path + ".qix") is looked for next to the layer
 * file first, then in the system temporary directory. If no non-empty index
 * file is found, the method returns without touching {@code spatialIndex}.
 */
private void loadSpatialIndex() {
//FIXME: when the index is opened from a file...
//how do we release it? a Layer.shutdown() method?
ReadableVectorial source = getSource();
// REVIEW what happens with the DXF, DGN, etc. drivers,
// since they are VectorialFileAdapter too.
if (!(source instanceof VectorialFileAdapter)) {
// we are not interested in db adapters
return;
}
VectorialDriver driver = source.getDriver();
if (!(driver instanceof BoundedShapes)) {
// we dont spatially index layers that are not bounded
return;
}
File file = ((VectorialFileAdapter) source).getFile();
String fileName = file.getAbsolutePath();
File sptFile = new File(fileName + ".qix");
if (!sptFile.exists() || (!(sptFile.length() > 0))) {
// before to exit, look for it in temp path
String tempPath = System.getProperty("java.io.tmpdir");
fileName = tempPath + File.separator + sptFile.getName();
sptFile = new File(fileName);
// it doesnt exists, must to create
if (!sptFile.exists() || (!(sptFile.length() > 0))) {
return;
}// if
}// if
try {
source.start();
// NOTE(review): the meaning of the "NM" mode string is assumed from
// usage — confirm against QuadtreeGt2's constructor contract.
spatialIndex = new QuadtreeGt2(FileUtils.getFileWithoutExtension(sptFile),
"NM", source.getFullExtent(), source.getShapeCount(), false);
source.setSpatialIndex(spatialIndex);
} catch (SpatialIndexException e) {
// Could not open the index: fall back to having no index at all.
spatialIndex = null;
e.printStackTrace();
return;
} catch (ReadDriverException e) {
spatialIndex = null;
e.printStackTrace();
return;
}
}
/**
 * Checks if it has associated an external spatial index
 * (an spatial index file).
 *
 * It looks for it in the main file path, or in the system temp path.
 * If the main file is rivers.shp, it looks for a file called
 * rivers.shp.qix.
 *
 * @return true when a non-empty index file exists in either location.
 */
public boolean isExternallySpatiallyIndexed() {
    /*
     * FIXME (AZABALA): make this independent of the index file type
     * (right now the .qix extension is assumed, but it depends on the
     * kind of spatial index used).
     */
    ReadableVectorial source = getSource();
    if (!(source instanceof VectorialFileAdapter)) {
        // Database adapters are out of scope here; think also of
        // non-spatial DBs such as HSQLDB.
        return false;
    }
    File mainFile = ((VectorialFileAdapter) source).getFile();
    File indexFile = new File(mainFile.getAbsolutePath() + ".qix");
    if (indexFile.exists() && indexFile.length() > 0) {
        return true;
    }
    // Not found next to the main file: try the temp directory.
    String tempPath = System.getProperty("java.io.tmpdir");
    indexFile = new File(tempPath + File.separator + indexFile.getName());
    return indexFile.exists() && indexFile.length() > 0;
}
/**
 * Sets the VectorialAdapter of the layer.
 *
 * @param rv the new ReadableVectorial source.
 */
public void setSource(ReadableVectorial rv) {
    source = rv;
    // azabala: a file-based spatial index may already exist for this
    // source; if so, pick it up now.
    loadSpatialIndex();
}
/**
 * Returns the full extent of the layer, reprojected to the map context's
 * projection when the layer projection differs from it.
 *
 * @return a fresh rectangle (a clone, never the source's own instance);
 *         when the source extent has zero width and height (newly created
 *         layers) a default 100x100 rectangle is returned instead.
 * @throws ReadDriverException if the driver fails while reading.
 * @throws ExpansionFileReadException if the expansion file cannot be read.
 */
public Rectangle2D getFullExtent() throws ReadDriverException, ExpansionFileReadException {
Rectangle2D rAux;
source.start();
rAux = (Rectangle2D)source.getFullExtent().clone();
source.stop();
// If a reprojection applies, reproject the extent.
if (!(this.getProjection()!=null &&
this.getMapContext().getProjection()!=null &&
this.getProjection().getAbrev().equals(this.getMapContext().getProjection().getAbrev()))){
ICoordTrans ct = getCoordTrans();
try{
if (ct != null) {
// Transform the two diagonal corners and rebuild the rectangle
// from them.
Point2D pt1 = new Point2D.Double(rAux.getMinX(), rAux.getMinY());
Point2D pt2 = new Point2D.Double(rAux.getMaxX(), rAux.getMaxY());
pt1 = ct.convert(pt1, null);
pt2 = ct.convert(pt2, null);
rAux = new Rectangle2D.Double();
rAux.setFrameFromDiagonal(pt1, pt2);
}
}catch (IllegalStateException e) {
// Reprojection failed: mark the layer unavailable and record the error.
this.setAvailable(false);
this.addError(new ReprojectLayerException(getName(), e));
}
}
// For newly created layers whose full extent has zero width and height.
if (rAux.getWidth()==0 && rAux.getHeight()==0) {
rAux=new Rectangle2D.Double(0,0,100,100);
}
return rAux;
}
/**
 * Draws using IFeatureIterator. This method will replace the old draw(...) one.
 *
 * Iterates the features visible in the view port, resolves each feature's
 * symbol from the legend (swapping in the selection symbol for selected
 * rows), and paints them. When the legend uses symbol levels (ZSort), each
 * level is rendered into its own off-screen image and the images are
 * composited at the end; a partial composite is also flushed to the screen
 * periodically as visual feedback.
 *
 * @autor jaume dominguez faus - [email protected]
 * @param image target image the map is rendered into
 * @param g graphics of the target image
 * @param viewPort current view port (extent, projection, offset)
 * @param cancel cooperative cancellation flag, checked inside every loop
 * @param scale current scale (not used directly here)
 * @throws ReadDriverException if the underlying driver fails while reading
 */
private void _draw(BufferedImage image, Graphics2D g, ViewPort viewPort,
Cancellable cancel, double scale) throws ReadDriverException {
boolean bDrawShapes = true;
if (legend instanceof SingleSymbolLegend) {
bDrawShapes = legend.getDefaultSymbol().isShapeVisible();
}
Point2D offset = viewPort.getOffset();
double dpi = MapContext.getScreenDPI();
if (bDrawShapes) {
boolean cacheFeatures = isSpatialCacheEnabled();
SpatialCache cache = null;
if (cacheFeatures) {
// Start from an empty cache each draw.
getSpatialCache().clearAll();
cache = getSpatialCache();
}
try {
ArrayList<String> fieldList = new ArrayList<String>();
// fields from legend
String[] aux = null;
if (legend instanceof IClassifiedVectorLegend) {
aux = ((IClassifiedVectorLegend) legend).getClassifyingFieldNames();
if (aux!=null) {
for (int i = 0; i < aux.length; i++) {
// check fields exists; if any classifying field is missing,
// fall back to a single-symbol legend.
if (sds.getFieldIndexByName(aux[i]) == -1) {
logger.warn("Error en leyenda de " + getName() +
". El campo " + aux[i] + " no está.");
legend = LegendFactory.createSingleSymbolLegend(getShapeType());
break;
}
fieldList.add(aux[i]);
}
}
}
// Get the iterator over the visible features
IFeatureIterator it = null;
if (isJoined()) {
it = new JoinFeatureIterator(this, viewPort,
fieldList.toArray(new String[fieldList.size()]));
}
else {
ReadableVectorial rv=getSource();
// rv.start();
it = rv.getFeatureIterator(
viewPort.getAdjustedExtent(),
fieldList.toArray(new String[fieldList.size()]),
viewPort.getProjection(),
true);
// rv.stop();
}
ZSort zSort = ((IVectorLegend) getLegend()).getZSort();
boolean bSymbolLevelError = false;
// if layer has map levels it will use a ZSort
boolean useZSort = zSort != null && zSort.isUsingZSort();
// -- visual FX stuff
long time = System.currentTimeMillis();
BufferedImage virtualBim;
Graphics2D virtualGraphics;
// render temporary map each screenRefreshRate milliseconds;
int screenRefreshDelay = (int) ((1D/MapControl.getDrawFrameRate())*3*1000);
BufferedImage[] imageLevels = null;
Graphics2D[] graphics = null;
if (useZSort) {
// One off-screen image per symbol level, sharing the target's
// transform and rendering hints.
imageLevels = new BufferedImage[zSort.getLevelCount()];
graphics = new Graphics2D[imageLevels.length];
for (int i = 0; !cancel.isCanceled() && i < imageLevels.length; i++) {
imageLevels[i] = new BufferedImage(image.getWidth(), image.getHeight(), image.getType());
graphics[i] = imageLevels[i].createGraphics();
graphics[i].setTransform(g.getTransform());
graphics[i].setRenderingHints(g.getRenderingHints());
}
}
// -- end visual FX stuff
boolean isInMemory = false;
if (getSource().getDriverAttributes() != null){
isInMemory = getSource().getDriverAttributes().isLoadedInMemory();
}
SelectionSupport selectionSupport=getSelectionSupport();
// Iteration over each feature
while ( !cancel.isCanceled() && it.hasNext()) {
IFeature feat = it.next();
IGeometry geom = null;
if (isInMemory){
// Clone so later mutations do not corrupt the in-memory store.
geom = feat.getGeometry().cloneGeometry();
}else{
geom = feat.getGeometry();
}
if (cacheFeatures) {
// NOTE(review): inserts while size() <= getMaxFeatures(), so the
// cache can end up one item over its maximum — confirm intended.
if (cache.getMaxFeatures() >= cache.size()) {
// already reprojected
cache.insert(geom.getBounds2D(), geom);
}
}
// retrieve the symbol associated to such feature
ISymbol sym = legend.getSymbolByFeature(feat);
if (sym == null) continue;
// Resolve the row index of the feature to check whether it is selected.
ReadableVectorial rv=getSource();
int selectionIndex=-1;
if (rv instanceof ISpatialDB){
selectionIndex = ((ISpatialDB)rv).getRowIndexByFID(feat);
}else{
selectionIndex = Integer.parseInt(feat.getID());
}
if (selectionIndex!=-1) {
if (selectionSupport.isSelected(selectionIndex)) {
sym = sym.getSymbolForSelection();
}
}
// Check if this symbol is sized with CartographicSupport
CartographicSupport csSym = null;
int symbolType = sym.getSymbolType();
boolean bDrawCartographicSupport = false;
if ( symbolType == FShape.POINT
|| symbolType == FShape.LINE
|| sym instanceof CartographicSupport) {
// patch: legacy FSymbol instances are excluded.
if (!sym.getClass().equals(FSymbol.class)) {
csSym = (CartographicSupport) sym;
bDrawCartographicSupport = (csSym.getUnit() != -1);
}
}
int x = -1;
int y = -1;
int[] xyCoords = new int[2];
// Check if size is a pixel
boolean onePoint = bDrawCartographicSupport ?
isOnePoint(g.getTransform(), viewPort, MapContext.getScreenDPI(), csSym, geom, xyCoords) :
isOnePoint(g.getTransform(), viewPort, geom, xyCoords);
// Avoid out of bounds exceptions
if (onePoint) {
x = xyCoords[0];
y = xyCoords[1];
if (x<0 || y<0 || x>= viewPort.getImageWidth() || y>=viewPort.getImageHeight()) continue;
}
if (useZSort) {
// Check if this symbol is a multilayer
int[] symLevels = zSort.getLevels(sym);
if (sym instanceof IMultiLayerSymbol) {
// if so, treat each of its layers as a single symbol
// in its corresponding map level
IMultiLayerSymbol mlSym = (IMultiLayerSymbol) sym;
for (int i = 0; !cancel.isCanceled() && i < mlSym.getLayerCount(); i++) {
ISymbol mySym = mlSym.getLayer(i);
int symbolLevel = 0;
if (symLevels != null) {
symbolLevel = symLevels[i];
} else {
/* an error occured when managing symbol levels.
 * some of the legend changed events regarding the
 * symbols did not finish satisfactory and the legend
 * is now inconsistent. For this drawing, it will finish
 * as it was at the bottom (level 0) but, when done, the
 * ZSort will be reset to avoid app crashes. This is
 * a bug that has to be fixed.
 */
bSymbolLevelError = true;
}
if (onePoint) {
if (x<0 || y<0 || x>= imageLevels[symbolLevel].getWidth() || y>=imageLevels[symbolLevel].getHeight()) continue;
imageLevels[symbolLevel].setRGB(x, y, mySym.getOnePointRgb());
} else {
if (!bDrawCartographicSupport) {
geom.drawInts(graphics[symbolLevel], viewPort, mySym, cancel);
} else {
geom.drawInts(graphics[symbolLevel], viewPort, dpi, (CartographicSupport) mySym, cancel);
}
}
}
} else {
// else, just draw the symbol in its level
int symbolLevel = 0;
if (symLevels != null) {
symbolLevel=symLevels[0];
} else {
/* If symLevels == null
 * an error occured when managing symbol levels.
 * some of the legend changed events regarding the
 * symbols did not finish satisfactory and the legend
 * is now inconsistent. For this drawing, it will finish
 * as it was at the bottom (level 0). This is
 * a bug that has to be fixed.
 */
// bSymbolLevelError = true;
}
if (!bDrawCartographicSupport) {
geom.drawInts(graphics[symbolLevel], viewPort, sym, cancel);
} else {
geom.drawInts(graphics[symbolLevel], viewPort, dpi, csSym, cancel);
}
}
// -- visual FX stuff
// When offset != 0 we are drawing onto the Layout, so the periodic
// screen refresh below must not run.
if (offset.getX()==0 && offset.getY()==0)
if ((System.currentTimeMillis() - time) > screenRefreshDelay) {
// Composite the levels rendered so far and flush to screen.
virtualBim = new BufferedImage(image.getWidth(),image.getHeight(),BufferedImage.TYPE_INT_ARGB);
virtualGraphics = virtualBim.createGraphics();
virtualGraphics.drawImage(image,0,0, null);
for (int i = 0; !cancel.isCanceled() && i < imageLevels.length; i++) {
virtualGraphics.drawImage(imageLevels[i],0,0, null);
}
g.clearRect(0, 0, image.getWidth(), image.getHeight());
g.drawImage(virtualBim, 0, 0, null);
time = System.currentTimeMillis();
}
// -- end visual FX stuff
} else {
// no ZSort, so there is only a map level, symbols are
// just drawn.
if (onePoint) {
if (x<0 || y<0 || x>= image.getWidth() || y>=image.getHeight()) continue;
image.setRGB(x, y, sym.getOnePointRgb());
} else {
if (!bDrawCartographicSupport) {
geom.drawInts(g, viewPort, sym, cancel);
} else {
geom.drawInts(g, viewPort, dpi, csSym,
cancel);
}
}
}
}
if (useZSort) {
// Final composite: base image plus each level, honouring the offset.
g.drawImage(image, (int)offset.getX(), (int)offset.getY(), null);
g.translate(offset.getX(), offset.getY());
for (int i = 0; !cancel.isCanceled() && i < imageLevels.length; i++) {
g.drawImage(imageLevels[i],0,0, null);
imageLevels[i] = null;
graphics[i] = null;
}
g.translate(-offset.getX(), -offset.getY());
imageLevels = null;
graphics = null;
}
it.closeIterator();
if (bSymbolLevelError) {
// Reset the inconsistent ZSort to avoid crashes on the next draw.
((IVectorLegend) getLegend()).setZSort(null);
}
} catch (ReadDriverException e) {
// Hide and deactivate the layer so a broken driver does not keep failing.
this.setVisible(false);
this.setActive(false);
throw e;
}
}
}
/**
 * Draws the layer onto the given image, unless drawing is on hold.
 * When an editing strategy is active it takes over the rendering;
 * otherwise the standard feature-iterator path is used.
 *
 * @param image target image the map is rendered into
 * @param g graphics of the target image
 * @param viewPort current view port
 * @param cancel cooperative cancellation flag
 * @param scale current scale
 * @throws ReadDriverException if reading from the driver fails
 */
public void draw(BufferedImage image, Graphics2D g, ViewPort viewPort,
        Cancellable cancel, double scale) throws ReadDriverException {
    if (isWaitTodraw()) {
        return;
    }
    if (getStrategy() == null) {
        _draw(image, g, viewPort, cancel, scale);
    } else {
        getStrategy().draw(image, g, viewPort, cancel);
    }
}
/**
 * Prints the layer at the resolution requested in the print properties.
 * Features are iterated once per symbol level (when a ZSort is active),
 * selected features may be highlighted, and geometries are clipped to the
 * view extent before printing — except decorated lines, whose decorators
 * would end up in the wrong places if the geometry were cut.
 *
 * @param g graphics to print into
 * @param viewPort current view port
 * @param cancel cooperative cancellation flag
 * @param scale current scale (not used directly here)
 * @param properties print request attributes; PrintQuality selects the DPI
 * @param highlight when true, selected features use their selection symbol
 * @throws ReadDriverException if reading from the driver fails
 */
public void _print(Graphics2D g, ViewPort viewPort, Cancellable cancel,
double scale, PrintRequestAttributeSet properties, boolean highlight) throws ReadDriverException {
boolean bDrawShapes = true;
// Whether geometries are clipped to the view extent before printing.
// NOTE(review): once a decorated line sets this to false it is never
// reset, so every later feature in the same print also skips clipping —
// confirm this is intended.
boolean cutGeom = true;
if (legend instanceof SingleSymbolLegend) {
bDrawShapes = legend.getDefaultSymbol().isShapeVisible();
}
if (bDrawShapes) {
try {
// Map the requested print quality to a DPI value (default: draft).
double dpi = 72;
PrintQuality resolution=(PrintQuality)properties.get(PrintQuality.class);
if (resolution.equals(PrintQuality.NORMAL)){
dpi = 300;
} else if (resolution.equals(PrintQuality.HIGH)){
dpi = 600;
} else if (resolution.equals(PrintQuality.DRAFT)){
dpi = 72;
}
ArrayList<String> fieldList = new ArrayList<String>();
String[] aux;
// fields from legend
if (legend instanceof IClassifiedVectorLegend) {
aux = ((IClassifiedVectorLegend) legend).getClassifyingFieldNames();
for (int i = 0; i < aux.length; i++) {
fieldList.add(aux[i]);
}
}
//
//		// fields from labeling
//		if (isLabeled()) {
//			aux = getLabelingStrategy().getUsedFields();
//			for (int i = 0; i < aux.length; i++) {
//				fieldList.add(aux[i]);
//			}
//		}
ZSort zSort = ((IVectorLegend) getLegend()).getZSort();
// if layer has map levels it will use a ZSort
boolean useZSort = zSort != null && zSort.isUsingZSort();
// One pass over the features per symbol level (single pass otherwise).
int mapLevelCount = (useZSort) ? zSort.getLevelCount() : 1;
for (int mapPass = 0; mapPass < mapLevelCount; mapPass++) {
// Get the iterator over the visible features
//			IFeatureIterator it = getSource().getFeatureIterator(
//					viewPort.getAdjustedExtent(),
//					fieldList.toArray(new String[fieldList.size()]),
//					viewPort.getProjection(),
//					true);
IFeatureIterator it = null;
if (isJoined()) {
it = new JoinFeatureIterator(this, viewPort,
fieldList.toArray(new String[fieldList.size()]));
}
else {
it = getSource().getFeatureIterator(
viewPort.getAdjustedExtent(),
fieldList.toArray(new String[fieldList.size()]),
viewPort.getProjection(),
true);
}
// Iteration over each feature
while ( !cancel.isCanceled() && it.hasNext()) {
IFeature feat = it.next();
IGeometry geom = feat.getGeometry();
// retreive the symbol associated to such feature
ISymbol sym = legend.getSymbolByFeature(feat);
if (sym == null) {
continue;
}
SelectionSupport selectionSupport=getSelectionSupport();
if (highlight) {
// Resolve the row index of the feature to check whether it is selected.
ReadableVectorial rv=getSource();
int selectionIndex=-1;
if (rv instanceof ISpatialDB){
selectionIndex = ((ISpatialDB)rv).getRowIndexByFID(feat);
} else {
selectionIndex = Integer.parseInt(feat.getID());
}
if (selectionIndex!=-1) {
if (selectionSupport.isSelected(selectionIndex)) {
sym = sym.getSymbolForSelection();
}
}
}
if (useZSort) {
int[] symLevels = zSort.getLevels(sym);
if(symLevels != null){
// Check if this symbol is a multilayer
if (sym instanceof IMultiLayerSymbol) {
// if so, get the layer corresponding to the current
// level. If none, continue to next iteration
IMultiLayerSymbol mlSym = (IMultiLayerSymbol) sym;
for (int i = 0; i < mlSym.getLayerCount(); i++) {
ISymbol mySym = mlSym.getLayer(i);
if (symLevels[i] == mapPass) {
sym = mySym;
break;
}
System.out.println("avoided layer "+i+"of symbol '"+mlSym.getDescription()+"' (pass "+mapPass+")");
}
} else {
// else, just draw the symbol in its level
if (symLevels[0] != mapPass) {
System.out.println("avoided single layer symbol '"+sym.getDescription()+"' (pass "+mapPass+")");
continue;
}
}
}
}
// Check if this symbol is sized with CartographicSupport
CartographicSupport csSym = null;
int symbolType = sym.getSymbolType();
if (symbolType == FShape.POINT
|| symbolType == FShape.LINE
|| sym instanceof CartographicSupport) {
csSym = (CartographicSupport) sym;
if (sym instanceof SimpleLineSymbol) {
// Work on a copy of the line symbol so print-time tweaks
// do not alter the on-screen symbol.
SimpleLineSymbol lineSym = new SimpleLineSymbol();
lineSym.setXMLEntity(sym.getXMLEntity());
if (((SimpleLineSymbol) sym).getLineStyle()
.getArrowDecorator() != null) {
// Lines with decorators should not be cut
// because the decorators would be drawn in
// the wrong places
cutGeom = false;
if (!((SimpleLineSymbol) sym)
.getLineStyle().getArrowDecorator()
.isScaleArrow()) {
// Hack for increasing non-scaled arrow
// marker size, which usually looks
// smaller when printing
lineSym.getLineStyle()
.getArrowDecorator()
.getMarker()
.setSize(
lineSym.getLineStyle()
.getArrowDecorator()
.getMarker()
.getSize() * 3);
}
} else {
// Make default lines slightly thinner when
// printing
lineSym.setLineWidth(lineSym.getLineWidth() * 0.75);
}
csSym = lineSym;
}
}
//System.err.println("passada "+mapPass+" pinte símboll "+sym.getDescription());
// We check if the geometry seems to intersect with the
// view extent
Rectangle2D extent = viewPort.getExtent();
IGeometry geomToPrint = null;
if (cutGeom) {
try {
if (geom.fastIntersects(extent.getX(),
extent.getY(), extent.getWidth(),
extent.getHeight())) {
// If it does, then we create a rectangle
// based on
// the view extent and cut the geometries by
// it
// before drawing them
GeometryFactory geomF = new GeometryFactory();
Geometry intersection = geom
.toJTSGeometry()
.intersection(
new Polygon(
geomF.createLinearRing(new Coordinate[] {
new Coordinate(
extent.getMinX(),
extent.getMaxY()),
new Coordinate(
extent.getMaxX(),
extent.getMaxY()),
new Coordinate(
extent.getMaxX(),
extent.getMinY()),
new Coordinate(
extent.getMinX(),
extent.getMinY()),
new Coordinate(
extent.getMinX(),
extent.getMaxY()) }),
null, geomF));
if (!intersection.isEmpty()) {
geomToPrint = FConverter
.jts_to_igeometry(intersection);
}
}
} catch (TopologyException e) {
// JTS could not compute the intersection; print the
// whole geometry rather than dropping it.
logger.warn(
"Some error happened while trying to cut a polygon with the view extent before printing (layer '"
+ this.getName()
+ "' / feature id "
+ feat.getID()
+ "). The whole polygon will be drawn. ",
e);
geomToPrint = geom;
}
} else {
geomToPrint = geom;
}
if (geomToPrint != null) {
if (csSym == null) {
geomToPrint.drawInts(g, viewPort, sym, null);
} else {
geomToPrint.drawInts(g, viewPort, dpi, csSym,
cancel);
}
}
}
it.closeIterator();
}
} catch (ReadDriverException e) {
// Hide and deactivate the layer so a broken driver does not keep failing.
this.setVisible(false);
this.setActive(false);
throw e;
}
}
}
/**
 * Prints the layer without highlighting the current selection.
 * Convenience overload delegating to the six-argument print with
 * {@code highlight == false}.
 */
public void print(Graphics2D g, ViewPort viewPort, Cancellable cancel,
        double scale, PrintRequestAttributeSet properties) throws ReadDriverException {
    this.print(g, viewPort, cancel, scale, properties, false);
}
/**
 * Prints the layer, optionally highlighting selected features, provided
 * the layer is visible and within its scale range.
 *
 * @param highlight when true, selected features use their selection symbol.
 */
public void print(Graphics2D g, ViewPort viewPort, Cancellable cancel,
        double scale, PrintRequestAttributeSet properties, boolean highlight) throws ReadDriverException {
    if (!isVisible() || !isWithinScale(scale)) {
        return;
    }
    _print(g, viewPort, cancel, scale, properties, highlight);
}
/**
 * Discards the in-memory reference to the spatial index.
 * Any index files on disk are left untouched.
 */
public void deleteSpatialIndex() {
//must we delete possible spatial indexes files?
spatialIndex = null;
}
/**
 * <p>
 * Creates an spatial index associated to this layer.
 * The spatial index will used
 * the native projection of the layer, so if the layer is reprojected, it will
 * be ignored.
 * </p>
 * Only file-based drivers whose shapes are bounded are indexed (spatial
 * databases already carry an implicit index). The index file is created
 * next to the layer file when possible; if that fails (typically because
 * of missing write permission) it falls back to the system temporary
 * directory, and as a last resort to a pure in-memory quadtree.
 *
 * @param cancelMonitor instance of CancellableMonitorable that allows
 * to monitor progress of spatial index creation, and cancel the process
 */
public void createSpatialIndex(CancellableMonitorable cancelMonitor){
    // FJP: THIS WILL HAVE TO CHANGE. For sequential layers we will have
    // to iterate with a while/next (or improve the FeatureVisitors so
    // they can traverse without geometry, only with rectangles).
    // If this vectorial layer is based in a spatial database, the spatial
    // index is already implicit. We only will index file drivers.
    ReadableVectorial va = getSource();
    // We must think in non spatial databases, like HSQLDB
    if(!(va instanceof VectorialFileAdapter)){
        return;
    }
    if (!(va.getDriver() instanceof BoundedShapes)) {
        return;
    }
    File file = ((VectorialFileAdapter) va).getFile();
    String fileName = file.getAbsolutePath();
    ISpatialIndex localCopy = null;
    try {
        va.start();
        localCopy = new QuadtreeGt2(fileName, "NM", va.getFullExtent(),
                va.getShapeCount(), true);
    } catch (SpatialIndexException e1) {
        // Probably we dont have writing permissions
        String directoryName = System.getProperty("java.io.tmpdir");
        File newFile = new File(directoryName +
                File.separator +
                file.getName());
        // BUGFIX: use the absolute path of the temp file. The previous
        // code used newFile.getName(), which drops the directory part and
        // silently created the index in the current working directory —
        // where loadSpatialIndex() (which searches the temp dir) would
        // never find it.
        String newFileName = newFile.getAbsolutePath();
        try {
            localCopy = new QuadtreeGt2(newFileName, "NM", va.getFullExtent(),
                    va.getShapeCount(), true);
        } catch (SpatialIndexException e) {
            // if we cant build a file based spatial index, we'll build
            // a pure memory spatial index
            localCopy = new QuadtreeJts();
        } catch (ReadDriverException e) {
            localCopy = new QuadtreeJts();
        }
    } catch(Exception e){
        e.printStackTrace();
    }//try
    if (localCopy == null) {
        // va.start()/getFullExtent() failed before any index could be
        // built (generic Exception path above): bail out instead of
        // hitting a NullPointerException below.
        return;
    }
    BoundedShapes shapeBounds = (BoundedShapes) va.getDriver();
    try {
        // Feed the bounding rectangle of every shape into the index.
        for (int i=0; i < va.getShapeCount(); i++)
        {
            if(cancelMonitor != null){
                if(cancelMonitor.isCanceled())
                    return;
                cancelMonitor.reportStep();
            }
            Rectangle2D r = shapeBounds.getShapeBounds(i);
            if(r != null)
                localCopy.insert(r, i);
        } // for
        va.stop();
        if(localCopy instanceof IPersistentSpatialIndex)
            ((IPersistentSpatialIndex) localCopy).flush();
        spatialIndex = localCopy;
        //vectorial adapter needs a reference to the spatial index, to solve
        //request for feature iteration based in spatial queries
        source.setSpatialIndex(spatialIndex);
    } catch (ReadDriverException e) {
        e.printStackTrace();
    }
}
/**
 * Creates the spatial index without progress monitoring.
 * Equivalent to calling createSpatialIndex(null).
 */
public void createSpatialIndex() {
createSpatialIndex(null);
}
/**
 * Applies the given visitor to the features referenced by the subset,
 * using the strategy appropriate for this layer.
 *
 * @param visitor visitor applied to each feature.
 * @param subset bitset of the row indices to visit.
 */
public void process(FeatureVisitor visitor, FBitSet subset)
        throws ReadDriverException, ExpansionFileReadException, VisitorException {
    StrategyManager.getStrategy(this).process(visitor, subset);
}
/**
 * Applies the given visitor to every feature of the layer, using the
 * strategy appropriate for this layer.
 *
 * @param visitor visitor applied to each feature.
 */
public void process(FeatureVisitor visitor) throws ReadDriverException, VisitorException {
    StrategyManager.getStrategy(this).process(visitor);
}
/**
 * Applies the given visitor to the features intersecting the rectangle,
 * using the strategy appropriate for this layer.
 *
 * @param visitor visitor applied to each feature.
 * @param rect query rectangle in map coordinates.
 */
public void process(FeatureVisitor visitor, Rectangle2D rect)
        throws ReadDriverException, ExpansionFileReadException, VisitorException {
    StrategyManager.getStrategy(this).process(visitor, rect);
}
/**
 * Returns the set of row indices of the features intersecting the given
 * rectangle.
 *
 * @param rect query rectangle in map coordinates.
 * @return bitset of matching row indices.
 */
public FBitSet queryByRect(Rectangle2D rect) throws ReadDriverException, VisitorException {
    return StrategyManager.getStrategy(this).queryByRect(rect);
}
/**
 * Returns the set of row indices of the features found within the given
 * tolerance of a point.
 *
 * @param p query point in map coordinates.
 * @param tolerance search distance around the point, in map units.
 * @return bitset of matching row indices.
 */
public FBitSet queryByPoint(Point2D p, double tolerance)
        throws ReadDriverException, VisitorException {
    return StrategyManager.getStrategy(this).queryByPoint(p, tolerance);
}
/**
 * Returns the set of row indices of the features satisfying the given
 * spatial relationship with a geometry.
 *
 * @param g query geometry.
 * @param relationship spatial relationship code to test.
 * @return bitset of matching row indices.
 */
public FBitSet queryByShape(IGeometry g, int relationship)
        throws ReadDriverException, VisitorException {
    return StrategyManager.getStrategy(this).queryByShape(g, relationship);
}
/**
 * Returns the "info by point" result for a screen point: the features
 * found within the tolerance, wrapped in a single VectorialXMLItem.
 *
 * @param p screen point.
 * @param tolerance search distance around the point, in map units.
 * @param cancel cancellation flag (not consulted by this implementation).
 * @return a one-element array holding the matching features.
 */
public XMLItem[] getInfo(Point p, double tolerance, Cancellable cancel) throws ReadDriverException, VisitorException {
    Point2D mapPoint = this.getMapContext().getViewPort().toMapPoint(p);
    FBitSet matches = queryByPoint(mapPoint, tolerance);
    return new VectorialXMLItem[] { new VectorialXMLItem(matches, this) };
}
/**
 * Sets the legend of this layer, binds it to the layer recordset, moves
 * the legend-listener registration from the old legend to the new one,
 * and notifies observers with a LegendChangedEvent. No-op when the new
 * legend is the same instance as, or equal to, the current one.
 *
 * @param r new vector legend.
 * @throws LegendLayerException if the legend cannot be bound to the
 *         layer's recordset (missing field or driver failure).
 */
public void setLegend(IVectorLegend r) throws LegendLayerException {
if (this.legend == r){
return;
}
if (this.legend != null && this.legend.equals(r)){
return;
}
IVectorLegend oldLegend = legend;
/*
 * Patch to discriminate classified legends whose classifying fields
 * are not present in the layer source.
 *
 * This can happen because earlier versions admitted classified legends
 * on layers joined to a table, using fields that belonged to the table
 * and not only to the layer.
 *
 */
//		if(r instanceof IClassifiedVectorLegend){
//			IClassifiedVectorLegend classifiedLegend = (IClassifiedVectorLegend)r;
//			String[] fieldNames = classifiedLegend.getClassifyingFieldNames();
//
//			for (int i = 0; i < fieldNames.length; i++) {
//				try {
//					if(this.getRecordset().getFieldIndexByName(fieldNames[i]) == -1){
////					if(this.getSource().getRecordset().getFieldIndexByName(fieldNames[i]) == -1){
//						logger.warn("Some fields of the classification of the legend doesn't belong with the source of the layer.");
//						if (this.legend == null){
//							r = LegendFactory.createSingleSymbolLegend(this.getShapeType());
//						} else {
//							return;
//						}
//					}
//				} catch (ReadDriverException e1) {
//					throw new LegendLayerException(getName(),e1);
//				}
//			}
//		}
/* End of patch */
legend = r;
try {
// Bind the new legend to this layer's recordset so it can classify features.
legend.setDataSource(getRecordset());
} catch (FieldNotFoundException e1) {
throw new LegendLayerException(getName(),e1);
} catch (ReadDriverException e1) {
throw new LegendLayerException(getName(),e1);
} finally{
// The rendered appearance may have changed even on failure.
this.updateDrawVersion();
}
// Move the legend-listener registration from the old legend to the new one.
if (oldLegend != null){
oldLegend.removeLegendListener(this);
}
if (legend != null){
legend.addLegendListener(this);
}
LegendChangedEvent e = LegendChangedEvent.createLegendChangedEvent(
oldLegend, legend);
e.setLayer(this);
callLegendChanged(e);
}
/**
 * Returns the legend of the layer.
 *
 * @return the current legend (may be null when none has been set yet).
 */
public ILegend getLegend() {
return legend;
}
/**
 * Returns the shape type contained by the layer (an FShape constant).
 * The value is read from the source once and cached afterwards.
 *
 * @return the shape type.
 * @throws ReadDriverException if the driver fails while reading.
 */
public int getShapeType() throws ReadDriverException {
    if (typeShape == -1) {
        ReadableVectorial src = getSource();
        src.start();
        typeShape = src.getShapeType();
        src.stop();
    }
    return typeShape;
}
/**
 * Serialises this layer to XML for project persistence. Children are
 * added in a fixed order: legend, selection support, and (depending on
 * the adapter type) the driver or link-properties entities. When the
 * layer is unavailable, the originally loaded XML is returned untouched.
 *
 * @return the XML entity describing this layer.
 * @throws XMLException if the recordset cannot be read during export.
 */
public XMLEntity getXMLEntity() throws XMLException {
if (!this.isAvailable() && this.orgXMLEntity != null) {
return this.orgXMLEntity;
}
XMLEntity xml = super.getXMLEntity();
if (getLegend()!=null)
xml.addChild(getLegend().getXMLEntity());
try {
if (getRecordset()!=null)
xml.addChild(getRecordset().getSelectionSupport().getXMLEntity());
} catch (ReadDriverException e1) {
e1.printStackTrace();
throw new XMLException(e1);
}
// The original ReadableVectorial is restored further below, for when the
// project is saved (editing may have wrapped it temporarily).
ReadableVectorial rv=getSource();
xml.putProperty("type", "vectorial");
if (source instanceof VectorialEditableAdapter) {
// Persist the original adapter, not the editing wrapper.
setSource(((VectorialEditableAdapter) source).getOriginalAdapter());
}
if (source instanceof VectorialFileAdapter) {
xml.putProperty("type", "vectorial");
xml.putProperty("absolutePath",((VectorialFileAdapter) source)
.getFile().getAbsolutePath());
xml.putProperty("file", pathGenerator.getPath(((VectorialFileAdapter) source)
.getFile().getAbsolutePath()));
try {
xml.putProperty("recordset-name", source.getRecordset()
.getName());
} catch (ReadDriverException e) {
throw new XMLException(e);
} catch (RuntimeException e) {
e.printStackTrace();
}
} else if (source instanceof VectorialDBAdapter) {
xml.putProperty("type", "vectorial");
IVectorialDatabaseDriver dbDriver = (IVectorialDatabaseDriver) source
.getDriver();
// Store the driver name so it can be recovered later
// through the DriverManager.
xml.putProperty("db", dbDriver.getName());
try {
xml.putProperty("recordset-name", source.getRecordset()
.getName());
} catch (ReadDriverException e) {
throw new XMLException(e);
} catch (RuntimeException e) {
e.printStackTrace();
}
xml.addChild(dbDriver.getXMLEntity()); // Third child; the legend and
// the selection support were
// added before.
} else if (source instanceof VectorialAdapter) {
// Generic fallback for any other adapter type.
xml.putProperty("type", "vectorial");
VectorialDriver driver = source.getDriver();
// Store the driver name so it can be recovered later
// through the DriverManager.
xml.putProperty("other", driver.getName());
//		try {
try {
xml.putProperty("recordset-name", source.getRecordset()
.getName());
} catch (ReadDriverException e) {
throw new XMLException(e);
} catch (RuntimeException e) {
e.printStackTrace();
}
if (driver instanceof IPersistence) {
//			xml.putProperty("className", driver.getClass().getName());
IPersistence persist = (IPersistence) driver;
xml.addChild(persist.getXMLEntity()); // Third child; the
// legend and the
// selection support
// were added before.
}
}
if (rv!=null)
setSource(rv);
xml.putProperty("driverName", source.getDriver().getName());
if (bHasJoin)
xml.putProperty("hasJoin", "true");
// properties from ILabelable
xml.putProperty("isLabeled", isLabeled);
if (strategy != null) {
// NOTE(review): the "Strategy" property is set on strategyXML, but the
// child added below is a fresh strategy.getXMLEntity() that does not
// carry it — confirm whether strategyXML should be added instead.
XMLEntity strategyXML = strategy.getXMLEntity();
strategyXML.putProperty("Strategy", strategy.getClassName());
xml.addChild(strategy.getXMLEntity());
}
xml.addChild(getLinkProperties().getXMLEntity());
return xml;
}
/*
* @see com.iver.cit.gvsig.fmap.layers.FLyrDefault#setXMLEntity(com.iver.utiles.XMLEntity)
*/
public void setXMLEntity(XMLEntity xml) throws XMLException {
try {
super.setXMLEntity(xml);
XMLEntity legendXML = xml.getChild(0);
IVectorLegend leg = LegendFactory.createFromXML(legendXML);
try {
getRecordset().getSelectionSupport().setXMLEntity(xml.getChild(1));
// JMVIVO: Esto sirve para algo????
/*
* Jaume: si, para restaurar el selectable datasource cuando se
* clona la capa, cuando se carga de un proyecto. Si no esta ya
* no se puede ni hacer consultas sql, ni hacer selecciones,
* ni usar la mayor parte de las herramientas.
*
* Lo vuelvo a poner.
*/
String recordsetName = xml.getStringProperty("recordset-name");
LayerFactory.getDataSourceFactory().changeDataSourceName(
getSource().getRecordset().getName(), recordsetName);
SelectableDataSource sds = new SelectableDataSource(LayerFactory
.getDataSourceFactory().createRandomDataSource(
recordsetName, DataSourceFactory.AUTOMATIC_OPENING));
} catch (NoSuchTableException e1) {
this.setAvailable(false);
throw new XMLException(e1);
} catch (ReadDriverException e1) {
this.setAvailable(false);
throw new XMLException(e1);
}
// Si tiene una unión, lo marcamos para que no se cree la leyenda hasta
// el final
// de la lectura del proyecto
if (xml.contains("hasJoin")) {
setIsJoined(true);
PostProcessSupport.addToPostProcess(this, "setLegend", leg, 1);
} else {
try {
setLegend(leg);
} catch (LegendLayerException e) {
throw new XMLException(e);
}
}
//Por compatibilidad con proyectos anteriores a la 1.0
boolean containsIsLabeled = xml.contains("isLabeled");
if (containsIsLabeled){
isLabeled = xml.getBooleanProperty("isLabeled");
}
// set properties for ILabelable
XMLEntity labelingXML = xml.firstChild("labelingStrategy", "labelingStrategy");
if (labelingXML!= null) {
if(!containsIsLabeled){
isLabeled = true;
}
try {
ILabelingStrategy labeling = LabelingFactory.createStrategyFromXML(labelingXML, this);
if (isJoined()) {
PostProcessSupport.addToPostProcess(this, "setLabelingStrategy", labeling, 1);
}
else
this.setLabelingStrategy(labeling);
} catch (NotExistInXMLEntity neXMLEX) {
// no strategy was set, just continue;
logger.warn("Reached what should be unreachable code");
}
} else if (legendXML.contains("labelFieldName")|| legendXML.contains("labelfield")) {
/* (jaume) begin patch;
* for backward compatibility purposes. Since gvSIG v1.1 labeling is
* no longer managed by the Legend but by the ILabelingStrategy. The
* following allows restoring older projects' labelings.
*/
String labelTextField = null;
if (legendXML.contains("labelFieldName")){
labelTextField = legendXML.getStringProperty("labelFieldName");
if (labelTextField != null) {
AttrInTableLabelingStrategy labeling = new AttrInTableLabelingStrategy();
labeling.setLayer(this);
int unit = 1;
boolean useFixedSize = false;
String labelFieldHeight = legendXML.getStringProperty("labelHeightFieldName");
labeling.setTextField(labelTextField);
if(labelFieldHeight!=null){
labeling.setHeightField(labelFieldHeight);
} else {
double size = -1;
for(int i=0; i<legendXML.getChildrenCount();i++){
XMLEntity xmlChild = legendXML.getChild(i);
if(xmlChild.contains("m_FontSize")){
double childFontSize = xmlChild.getDoubleProperty("m_FontSize");
if(size<0){
size = childFontSize;
useFixedSize = true;
} else {
useFixedSize = useFixedSize && (size==childFontSize);
}
if(xmlChild.contains("m_bUseFontSize")){
if(xmlChild.getBooleanProperty("m_bUseFontSize")){
unit = -1;
} else {
unit = 1;
}
}
}
}
labeling.setFixedSize(size/1.4);//Factor de corrección que se aplicaba antes en el etiquetado
}
labeling.setUsesFixedSize(useFixedSize);
labeling.setUnit(unit);
labeling.setRotationField(legendXML.getStringProperty("labelRotationFieldName"));
if (isJoined()) {
PostProcessSupport.addToPostProcess(this, "setLabelingStrategy", labeling, 1);
}
else
this.setLabelingStrategy(labeling);
this.setIsLabeled(true);
}
}else{
labelTextField = legendXML.getStringProperty("labelfield");
if (labelTextField != null) {
AttrInTableLabelingStrategy labeling = new AttrInTableLabelingStrategy();
labeling.setLayer(this);
int unit = 1;
boolean useFixedSize = false;
String labelFieldHeight = legendXML.getStringProperty("labelFieldHeight");
labeling.setTextField(labelTextField);
if(labelFieldHeight!=null){
labeling.setHeightField(labelFieldHeight);
} else {
double size = -1;
for(int i=0; i<legendXML.getChildrenCount();i++){
XMLEntity xmlChild = legendXML.getChild(i);
if(xmlChild.contains("m_FontSize")){
double childFontSize = xmlChild.getDoubleProperty("m_FontSize");
if(size<0){
size = childFontSize;
useFixedSize = true;
} else {
useFixedSize = useFixedSize && (size==childFontSize);
}
if(xmlChild.contains("m_bUseFontSize")){
if(xmlChild.getBooleanProperty("m_bUseFontSize")){
unit = -1;
} else {
unit = 1;
}
}
}
}
labeling.setFixedSize(size/1.4);//Factor de corrección que se aplicaba antes en el etiquetado
}
labeling.setUsesFixedSize(useFixedSize);
labeling.setUnit(unit);
labeling.setRotationField(legendXML.getStringProperty("labelFieldRotation"));
if (isJoined()) {
PostProcessSupport.addToPostProcess(this, "setLabelingStrategy", labeling, 1);
}
else
this.setLabelingStrategy(labeling);
this.setIsLabeled(true);
}
}
}else if(!containsIsLabeled){
isLabeled = false;
}
// compatibility with hyperlink from 1.9 alpha version... do we really need to be compatible with alpha versions??
XMLEntity xmlLinkProperties=xml.firstChild("typeChild", "linkProperties");
if (xmlLinkProperties != null){
try {
String fieldName=xmlLinkProperties.getStringProperty("fieldName");
xmlLinkProperties.remove("fieldName");
String extName = xmlLinkProperties.getStringProperty("extName");
xmlLinkProperties.remove("extName");
int typeLink = xmlLinkProperties.getIntProperty("typeLink");
xmlLinkProperties.remove("typeLink");
if (fieldName!=null) {
setProperty("legacy.hyperlink.selectedField", fieldName);
setProperty("legacy.hyperlink.type", new Integer(typeLink));
if (extName!=null) {
setProperty("legacy.hyperlink.extension", extName);
}
}
}
catch (NotExistInXMLEntity ex) {
logger.warn("Error getting old hyperlink configuration", ex);
}
}
} catch (XMLException e) {
this.setAvailable(false);
this.orgXMLEntity = xml;
} catch (Exception e) {
e.printStackTrace();
this.setAvailable(false);
this.orgXMLEntity = xml;
}
}
public void setXMLEntityNew(XMLEntity xml) throws XMLException {
try {
super.setXMLEntity(xml);
XMLEntity legendXML = xml.getChild(0);
IVectorLegend leg = LegendFactory.createFromXML(legendXML);
/* (jaume) begin patch;
* for backward compatibility purposes. Since gvSIG v1.1 labeling is
* no longer managed by the Legend but by the ILabelingStrategy. The
* following allows restoring older projects' labelings.
*/
if (legendXML.contains("labelFieldHeight")) {
AttrInTableLabelingStrategy labeling = new AttrInTableLabelingStrategy();
labeling.setLayer(this);
labeling.setTextField(legendXML.getStringProperty("labelFieldHeight"));
labeling.setRotationField(legendXML.getStringProperty("labelFieldRotation"));
this.setLabelingStrategy(labeling);
this.setIsLabeled(true);
}
/* end patch */
try {
getRecordset().getSelectionSupport().setXMLEntity(xml.getChild(1));
this.setLoadSelection(xml.getChild(1));
} catch (ReadDriverException e1) {
this.setAvailable(false);
throw new XMLException(e1);
}
// Si tiene una unión, lo marcamos para que no se cree la leyenda hasta
// el final
// de la lectura del proyecto
if (xml.contains("hasJoin")) {
setIsJoined(true);
PostProcessSupport.addToPostProcess(this, "setLegend", leg, 1);
} else {
this.setLoadLegend(leg);
}
} catch (XMLException e) {
this.setAvailable(false);
this.orgXMLEntity = xml;
} catch (Exception e) {
this.setAvailable(false);
this.orgXMLEntity = xml;
}
}
/**
* Sobreimplementación del método toString para que las bases de datos
* identifiquen la capa.
*
* @return DOCUMENT ME!
*/
public String toString() {
/*
* Se usa internamente para que la parte de datos identifique de forma
* unívoca las tablas
*/
String ret = super.toString();
return "layer" + ret.substring(ret.indexOf('@') + 1);
}
public boolean isJoined() {
return bHasJoin;
}
/**
* Returns if a layer is spatially indexed
*
* @return if this layer has the ability to proces spatial queries without
* secuential scans.
*/
public boolean isSpatiallyIndexed() {
ReadableVectorial source = getSource();
if (source instanceof ISpatialDB)
return true;
//FIXME azabala
/*
* Esto es muy dudoso, y puede cambiar.
* Estoy diciendo que las que no son fichero o no son
* BoundedShapes estan indexadas. Esto es mentira, pero
* así quien pregunte no querrá generar el indice.
* Esta por ver si interesa generar el indice para capas
* HSQLDB, WFS, etc.
*/
if(!(source instanceof VectorialFileAdapter)){
return true;
}
if (!(source.getDriver() instanceof BoundedShapes)) {
return true;
}
if (getISpatialIndex() != null)
return true;
return false;
}
public void setIsJoined(boolean hasJoin) {
bHasJoin = hasJoin;
}
/**
* @return Returns the spatialIndex.
*/
public ISpatialIndex getISpatialIndex() {
return spatialIndex;
}
/**
* Sets the spatial index. This could be useful if, for some
* reasons, you want to work with a distinct spatial index
* (for example, a spatial index which could makes nearest
* neighbour querys)
* @param spatialIndex
*/
public void setISpatialIndex(ISpatialIndex spatialIndex){
this.spatialIndex = spatialIndex;
}
public SelectableDataSource getRecordset() throws ReadDriverException {
if (!this.isAvailable()) return null;
if (sds == null) {
SelectableDataSource ds = source.getRecordset();
if (ds == null) {
return null;
}
sds = ds;
getSelectionSupport().addSelectionListener(this);
}
return sds;
}
public void setEditing(boolean b) throws StartEditionLayerException {
super.setEditing(b);
try {
if (b) {
VectorialEditableAdapter vea = null;
// TODO: Qué pasa si hay más tipos de adapters?
// FJP: Se podría pasar como argumento el
// VectorialEditableAdapter
// que se quiera usar para evitar meter código aquí de este
// estilo.
if (getSource() instanceof VectorialDBAdapter) {
vea = new VectorialEditableDBAdapter();
} else if (this instanceof FLyrAnnotation) {
vea = new AnnotationEditableAdapter(
(FLyrAnnotation) this);
} else {
vea = new VectorialEditableAdapter();
}
vea.addEditionListener(this);
vea.setOriginalVectorialAdapter(getSource());
// azo: implementations of readablevectorial need
//references of projection and spatial index
vea.setProjection(getProjection());
vea.setSpatialIndex(spatialIndex);
// /vea.setSpatialIndex(getSpatialIndex());
// /vea.setFullExtent(getFullExtent());
vea.setCoordTrans(getCoordTrans());
vea.startEdition(EditionEvent.GRAPHIC);
setSource(vea);
getRecordset().setSelectionSupport(
vea.getOriginalAdapter().getRecordset()
.getSelectionSupport());
} else {
VectorialEditableAdapter vea = (VectorialEditableAdapter) getSource();
vea.removeEditionListener(this);
setSource(vea.getOriginalAdapter());
}
// Si tenemos una leyenda, hay que pegarle el cambiazo a su
// recordset
setRecordset(getSource().getRecordset());
if (getLegend() instanceof IVectorLegend) {
IVectorLegend ley = (IVectorLegend) getLegend();
ley.setDataSource(getSource().getRecordset());
// Esto lo pongo para evitar que al dibujar sobre un
// dxf, dwg, o dgn no veamos nada. Es debido al checkbox
// de la leyenda de textos "dibujar solo textos".
//jaume
// if (!(getSource().getDriver() instanceof IndexedShpDriver)){
// FSymbol symbol=new FSymbol(getShapeType());
// symbol.setFontSizeInPixels(false);
// symbol.setFont(new Font("SansSerif", Font.PLAIN, 9));
// Color color=symbol.getColor();
// int alpha=symbol.getColor().getAlpha();
// if (alpha>250) {
// symbol.setColor(new Color(color.getRed(),color.getGreen(),color.getBlue(),100));
// }
// ley.setDefaultSymbol(symbol);
// }
//jaume//
ley.useDefaultSymbol(true);
}
} catch (ReadDriverException e) {
throw new StartEditionLayerException(getName(),e);
} catch (FieldNotFoundException e) {
throw new StartEditionLayerException(getName(),e);
} catch (StartWriterVisitorException e) {
throw new StartEditionLayerException(getName(),e);
}
setSpatialCacheEnabled(b);
callEditionChanged(LayerEvent
.createEditionChangedEvent(this, "edition"));
}
/**
* Para cuando haces una unión, sustituyes el recorset por el nuevo. De esta
* forma, podrás poner leyendas basadas en el nuevo recordset
*
* @param newSds
*/
public void setRecordset(SelectableDataSource newSds) {
// TODO: Deberiamos hacer comprobaciones del cambio
sds = newSds;
getSelectionSupport().addSelectionListener(this);
this.updateDrawVersion();
}
public void clearSpatialCache()
{
spatialCache.clearAll();
}
public boolean isSpatialCacheEnabled() {
return spatialCacheEnabled;
}
public void setSpatialCacheEnabled(boolean spatialCacheEnabled) {
this.spatialCacheEnabled = spatialCacheEnabled;
}
public SpatialCache getSpatialCache() {
return spatialCache;
}
/**
* Siempre es un numero mayor de 1000
* @param maxFeatures
*/
public void setMaxFeaturesInEditionCache(int maxFeatures) {
if (maxFeatures > spatialCache.maxFeatures)
spatialCache.setMaxFeatures(maxFeatures);
}
/**
* This method returns a boolean that is used by the FPopMenu
* to make visible the properties menu or not. It is visible by
* default, and if a later don't have to show this menu only
* has to override this method.
* @return
* If the properties menu is visible (or not)
*/
public boolean isPropertiesMenuVisible(){
return true;
}
public void reload() throws ReloadLayerException {
if(this.isEditing()){
throw new ReloadLayerException(getName());
}
this.setAvailable(true);
super.reload();
this.updateDrawVersion();
try {
this.source.getDriver().reload();
if (this.getLegend() == null) {
if (this.getRecordset().getDriver() instanceof WithDefaultLegend) {
WithDefaultLegend aux = (WithDefaultLegend) this.getRecordset().getDriver();
this.setLegend((IVectorLegend) aux.getDefaultLegend());
this.setLabelingStrategy(aux.getDefaultLabelingStrategy());
} else {
this.setLegend(LegendFactory.createSingleSymbolLegend(
this.getShapeType()));
}
}
} catch (LegendLayerException e) {
this.setAvailable(false);
throw new ReloadLayerException(getName(),e);
} catch (ReadDriverException e) {
this.setAvailable(false);
throw new ReloadLayerException(getName(),e);
}
}
protected void setLoadSelection(XMLEntity xml) {
this.loadSelection = xml;
}
protected void setLoadLegend(IVectorLegend legend) {
this.loadLegend = legend;
}
protected void putLoadSelection() throws XMLException {
if (this.loadSelection == null) return;
try {
this.getRecordset().getSelectionSupport().setXMLEntity(this.loadSelection);
} catch (ReadDriverException e) {
throw new XMLException(e);
}
this.loadSelection = null;
}
protected void putLoadLegend() throws LegendLayerException {
if (this.loadLegend == null) return;
this.setLegend(this.loadLegend);
this.loadLegend = null;
}
protected void cleanLoadOptions() {
this.loadLegend = null;
this.loadSelection = null;
}
public boolean isWritable() {
VectorialDriver drv = getSource().getDriver();
if (!drv.isWritable())
return false;
if (drv instanceof IWriteable)
{
IWriter writer = ((IWriteable)drv).getWriter();
if (writer != null)
{
if (writer instanceof ISpatialWriter)
return true;
}
}
return false;
}
public FLayer cloneLayer() throws Exception {
FLyrVect clonedLayer = new FLyrVect();
clonedLayer.setSource(getSource());
if (isJoined()) {
clonedLayer.setIsJoined(true);
clonedLayer.setRecordset(getRecordset());
}
clonedLayer.setVisible(isVisible());
clonedLayer.setISpatialIndex(getISpatialIndex());
clonedLayer.setName(getName());
clonedLayer.setCoordTrans(getCoordTrans());
clonedLayer.setLegend((IVectorLegend)getLegend().cloneLegend());
clonedLayer.setIsLabeled(isLabeled());
ILabelingStrategy labelingStrategy=getLabelingStrategy();
if (labelingStrategy!=null)
clonedLayer.setLabelingStrategy(labelingStrategy);
return clonedLayer;
}
public SelectionSupport getSelectionSupport() {
try {
return getRecordset().getSelectionSupport();
} catch (ReadDriverException e) {
e.printStackTrace();
}
return null;
}
protected boolean isOnePoint(AffineTransform graphicsTransform, ViewPort viewPort, double dpi, CartographicSupport csSym, IGeometry geom, int[] xyCoords) {
return isOnePoint(graphicsTransform, viewPort, geom, xyCoords) && csSym.getCartographicSize(viewPort, dpi, (FShape)geom.getInternalShape()) <= 1;
}
protected boolean isOnePoint(AffineTransform graphicsTransform, ViewPort viewPort, IGeometry geom, int[] xyCoords) {
boolean onePoint = false;
int type=geom.getGeometryType() % FShape.Z;
if (type!=FShape.POINT && type!=FShape.MULTIPOINT && type!=FShape.NULL) {
Rectangle2D geomBounds = geom.getBounds2D();
// ICoordTrans ct = getCoordTrans();
// Se supone que la geometria ya esta
// repoyectada y no hay que hacer
// ninguna transformacion
// if (ct!=null) {
//// geomBounds = ct.getInverted().convert(geomBounds);
// geomBounds = ct.convert(geomBounds);
// }
double dist1Pixel = viewPort.getDist1pixel();
onePoint = (geomBounds.getWidth() <= dist1Pixel
&& geomBounds.getHeight() <= dist1Pixel);
if (onePoint) {
// avoid out of range exceptions
FPoint2D p = new FPoint2D(geomBounds.getMinX(), geomBounds.getMinY());
p.transform(viewPort.getAffineTransform());
p.transform(graphicsTransform);
xyCoords[0] = (int) p.getX();
xyCoords[1] = (int) p.getY();
}
}
return onePoint;
}
/*
* jaume. Stuff from ILabeled.
*/
private boolean isLabeled;
private ILabelingStrategy strategy;
public boolean isLabeled() {
return isLabeled;
}
public void setIsLabeled(boolean isLabeled) {
this.isLabeled = isLabeled;
}
public ILabelingStrategy getLabelingStrategy() {
return strategy;
}
public void setLabelingStrategy(ILabelingStrategy strategy) {
this.strategy = strategy;
try {
strategy.setLayer(this);
} catch (ReadDriverException e) {
e.printStackTrace();
}
}
public void drawLabels(BufferedImage image, Graphics2D g, ViewPort viewPort,
Cancellable cancel, double scale, double dpi) throws ReadDriverException {
if (strategy!=null && isWithinScale(scale)) {
strategy.draw(image, g, viewPort, cancel, dpi);
}
}
public void printLabels(Graphics2D g, ViewPort viewPort,
Cancellable cancel, double scale, PrintRequestAttributeSet properties) throws ReadDriverException {
if (strategy!=null) {
strategy.print(g, viewPort, cancel, properties);
}
}
//Métodos para el uso de HyperLinks en capas FLyerVect
/**
* Return true, because a Vectorial Layer supports HyperLink
*/
public boolean allowLinks()
{
return true;
}
/**
* Returns an instance of AbstractLinkProperties that contains the information
* of the HyperLink
* @return Abstra
*/
public AbstractLinkProperties getLinkProperties()
{
return linkProperties;
}
/**
* Provides an array with URIs. Returns one URI by geometry that includes the point
* in its own geometry limits with a allowed tolerance.
* @param layer, the layer
* @param point, the point to check that is contained or not in the geometries in the layer
* @param tolerance, the tolerance allowed. Allowed margin of error to detect if the point
* is contained in some geometries of the layer
* @return
*/
public URI[] getLink(Point2D point, double tolerance)
{
//return linkProperties.getLink(this)
return linkProperties.getLink(this,point,tolerance);
}
public void selectionChanged(SelectionEvent e) {
this.updateDrawVersion();
}
public void afterFieldEditEvent(AfterFieldEditEvent e) {
this.updateDrawVersion();
}
public void afterRowEditEvent(IRow feat, AfterRowEditEvent e) {
this.updateDrawVersion();
}
public void beforeFieldEditEvent(BeforeFieldEditEvent e) {
}
public void beforeRowEditEvent(IRow feat, BeforeRowEditEvent e) {
}
public void processEvent(EditionEvent e) {
if (e.getChangeType()== e.ROW_EDITION){
this.updateDrawVersion();
}
}
public void legendCleared(LegendClearEvent event) {
this.updateDrawVersion();
LegendChangedEvent e = LegendChangedEvent.createLegendChangedEvent(
legend, legend);
this.callLegendChanged(e);
}
public boolean symbolChanged(SymbolLegendEvent e) {
this.updateDrawVersion();
LegendChangedEvent event = LegendChangedEvent.createLegendChangedEvent(
legend, legend);
this.callLegendChanged(event);
return true;
}
public String getTypeStringVectorLayer() throws ReadDriverException {
String typeString="";
int typeShape=this.getShapeType();
if (FShape.MULTI==typeShape){
ReadableVectorial rv=this.getSource();
int i=0;
boolean isCorrect=false;
while(rv.getShapeCount()>i && !isCorrect){
IGeometry geom=rv.getShape(i);
if (geom==null){
i++;
continue;
}
isCorrect=true;
if ((geom.getGeometryType() & FShape.Z) == FShape.Z){
typeString="Geometries3D";
}else{
typeString="Geometries2D";
}
}
}else{
ReadableVectorial rv=this.getSource();
int i=0;
boolean isCorrect=false;
while(rv.getShapeCount()>i && !isCorrect){
IGeometry geom=rv.getShape(i);
if (geom==null){
i++;
continue;
}
isCorrect=true;
int type=geom.getGeometryType();
if (FShape.POINT == type){
typeString="Point2D";
} else if (FShape.LINE == type){
typeString="Line2D";
} else if (FShape.POLYGON == type){
typeString="Polygon2D";
} else if (FShape.MULTIPOINT == type){
typeString="MultiPint2D";
} else if ((FShape.POINT | FShape.Z) == type ){
typeString="Point3D";
} else if ((FShape.LINE | FShape.Z) == type ){
typeString="Line3D";
} else if ((FShape.POLYGON | FShape.Z) == type ){
typeString="Polygon3D";
} else if ((FShape.MULTIPOINT | FShape.Z) == type ){
typeString="MultiPoint3D";
} else if ((FShape.POINT | FShape.M) == type ){
typeString="PointM";
} else if ((FShape.LINE | FShape.M) == type ){
typeString="LineM";
} else if ((FShape.POLYGON | FShape.M) == type ){
typeString="PolygonM";
} else if ((FShape.MULTIPOINT | FShape.M) == type ){
typeString="MultiPointM";
} else if ((FShape.MULTI | FShape.M) == type ){
typeString="M";
}
}
return typeString;
}
return "";
}
public int getTypeIntVectorLayer() throws ReadDriverException {
int typeInt=0;
int typeShape=this.getShapeType();
if (FShape.MULTI==typeShape){
ReadableVectorial rv=this.getSource();
int i=0;
boolean isCorrect=false;
while(rv.getShapeCount()>i && !isCorrect){
IGeometry geom=rv.getShape(i);
if (geom==null){
i++;
continue;
}
isCorrect=true;
if ((geom.getGeometryType() & FShape.Z) == FShape.Z){
typeInt=FShape.MULTI | FShape.Z;
}else{
typeInt=FShape.MULTI;
}
}
}else{
ReadableVectorial rv=this.getSource();
int i=0;
boolean isCorrect=false;
while(rv.getShapeCount()>i && !isCorrect){
IGeometry geom=rv.getShape(i);
if (geom==null){
i++;
continue;
}
isCorrect=true;
int type=geom.getGeometryType();
typeInt=type;
}
return typeInt;
}
return typeInt;
}
}<|fim▁end|>
| |
<|file_name|>pagination.js<|end_file_name|><|fim▁begin|>const findIndex = require('lodash/findIndex');
module.exports = (req, res, next) => {
const pageData = req.pageData || {};
const siblings = pageData.meta ? pageData.meta.siblings : [];
const pagination = pageData.pagination || {};
if (pageData && pageData.guide && siblings) {
const currentIndex = findIndex(siblings, (p) => {
return pageData.slug.indexOf(p.slug) !== -1;
});
if (currentIndex > 0) {
pagination.previous = siblings[currentIndex - 1];
}<|fim▁hole|> pagination.next = siblings[currentIndex + 1];
}
pageData.pagination = pagination;
}
// eslint-disable-next-line no-param-reassign
req.pageData = pageData;
next();
};<|fim▁end|>
|
if (siblings && currentIndex < siblings.length) {
|
<|file_name|>animate_test.go<|end_file_name|><|fim▁begin|>package animate
import (
"testing"
)<|fim▁hole|> testData := []struct {
text string
}{
{"funny cat"},
}
command := Animate()
for _, d := range testData {
rsp, err := command.Exec("animate", d.text)
if err != nil {
t.Fatal(err)
}
if rsp == nil {
t.Fatal("expected result, got nil")
}
}
}<|fim▁end|>
|
func TestGeocode(t *testing.T) {
|
<|file_name|>_importer.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and<|fim▁hole|># ------------------------------------------------------------------
""" _importer.py
Merge _yang_ns for subpackage to a single _yang_ns at runtime.
"""
import importlib
import pkgutil
from ydk import models
class YangNs(object):
def __init__(self, d):
self.__dict__ = d
_yang_ns_dict = {}
exempt_keys = set(['__builtins__', '__doc__', '__file__',
'__name__', '__package__'])
try:
_yang_ns = importlib.import_module('ydk.models._yang_ns')
except ImportError:
for (importer, name, ispkg) in pkgutil.iter_modules(models.__path__):
if ispkg:
try:
mod_yang_ns = importlib.import_module('ydk.models.%s._yang_ns' % name)
except ImportError:
continue
keys = set(mod_yang_ns.__dict__) - exempt_keys
for key in keys:
if key not in _yang_ns_dict:
_yang_ns_dict[key] = mod_yang_ns.__dict__[key]
else:
if isinstance(_yang_ns_dict[key], dict):
_yang_ns_dict[key].update(mod_yang_ns.__dict__[key])
else:
# shadow old value
_yang_ns_dict[key] = mod_yang_ns.__dict__[key]
_yang_ns = YangNs(_yang_ns_dict)<|fim▁end|>
|
# limitations under the License.
|
<|file_name|>08-maybe-and-optionals.js<|end_file_name|><|fim▁begin|>/* @flow */
import assert from 'assert';
import { CONVERSION_TABLE } from './06-export';
import type { Unit, UnitValue } from './06-export';
// We didn't cover any edge cases yet, so let's do this now
export function convertUnit(from: Unit, to: Unit, value: number): ?number {
if (from === to) {
return value;
}
// If there is no conversion possible, return null
// Note how we are using '== null' instead of '=== null'
// because the first notation will cover both cases, 'null'
// and 'undefined', which spares us a lot of extra code.
// You will need to set eslint's 'eqeqeq' rule to '[2, "smart"]'
if (CONVERSION_TABLE[from] == null || CONVERSION_TABLE[from][to] == null) {
return null;
}
const transform = CONVERSION_TABLE[from][to];
return transform(value);
}
// Intersection Type for assuming unit to be 'm'
// unit cannot be anything but a `Unit`, so we even
// prevent errors on definition
type MeterUnitValue = {
unit: 'm'
} & UnitValue;
// Convert whole UnitValues instead of single values
function convertToKm(unitValue: MeterUnitValue): ?UnitValue {
const { unit, value } = unitValue;
const converted = convertUnit(unit, 'km', value);
if (converted == null) {
return null;<|fim▁hole|> value: converted,
}
}
const value = convertToKm({ unit: 'm', value: 1500 });
assert.deepEqual(value, { unit: 'km', value: 1.5 });<|fim▁end|>
|
}
return {
unit: 'km',
|
<|file_name|>run_all.cpp<|end_file_name|><|fim▁begin|>/*
* run_all.cpp
*
* This file is part of NEST.
*
* Copyright (C) 2004 The NEST Initiative
*
* NEST is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* NEST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with NEST. If not, see <http://www.gnu.org/licenses/>.
*
*/
<|fim▁hole|>#define BOOST_TEST_DYN_LINK
#include <boost/test/included/unit_test.hpp>
// Includes from cpptests
#include "test_block_vector.h"
#include "test_enum_bitfield.h"
#include "test_parameter.h"
#include "test_sort.h"
#include "test_target_fields.h"<|fim▁end|>
|
#define BOOST_TEST_MODULE cpptests
|
<|file_name|>java_protobuf_library.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""<|fim▁hole|><|fim▁end|>
|
return self.payload.import_specs
|
<|file_name|>kms.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""<|fim▁hole|>import os
import platform
import commands
import redhat.kms
class ActivateCommand(commands.CommandBase):
def __init__(self, *args, **kwargs):
pass
@staticmethod
def detect_os():
"""
Return the Linux Distribution or other OS name
"""
translations = {"redhat": redhat}
system = os.uname()[0]
if system == "Linux":
system = platform.linux_distribution(full_distribution_name=0)[0]
# Arch Linux returns None for platform.linux_distribution()
if not system and os.path.exists('/etc/arch-release'):
system = 'arch'
if not system:
return None
system = system.lower()
global DEFAULT_HOSTNAME
DEFAULT_HOSTNAME = system
return translations.get(system)
@commands.command_add('kmsactivate')
def activate_cmd(self, data):
os_mod = self.detect_os()
if not os_mod:
raise SystemError("KMS not supported on this OS")
return os_mod.kms.kms_activate(data)<|fim▁end|>
|
JSON KMS activation
"""
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![cfg(test)]
mod interactive_time;<|fim▁hole|>mod paint_time;<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Markus Dittrich
// Licensed under BSD license, see LICENSE file for details
//
// st is a simple commandline helper script for calculating basic
// statistics on a data file consisting of column oriented
// floating point numbers.
// NOTE: Currently stats will read in all the data to compute the statistics
// and thus require memory on the order of the data set size.
use std::env;
use std::f64;
use std::fs::File;
use std::io::{BufRead, BufReader};
mod stats;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() <= 1 {
usage();
return;
}
let file_name = &args[1];
let file = File::open(file_name);
if file.is_err() {
usage();
return;
}
let buf = BufReader::new(file.unwrap());
let out = compute_statistics(buf);
let mut count = 0;
for o in out {
println!("Column {}: \n", count);
println!("#elems : {}", o.count);
println!("min : {}", o.min);
println!("max : {}", o.max);
println!("mean : {}", o.mean);
println!("median : {}", o.median);
println!("std : {}", o.sd);
println!("\n\n");
count += 1;
}
}
// compute_statistics computes basic statistics for the content of the
// provided BufReader
fn compute_statistics(mut buf: BufReader<File>) -> Vec<Output> {
// read first line to determine number of data columns
let mut out = Vec::new();
let s = &mut String::new();
let r = buf.read_line(s);
if r.is_err () {
return out;
}
let tokens : Vec<&str> = s.split(" ").collect();
for i in 0..tokens.len() {
out.push(Output::new());
let n_r = tokens[i].trim().parse::<f64>();
if n_r.is_err() {
println!("Warning {} in line {} is not a number. Skipping", s, out[i].count);
continue;
}
let n = n_r.unwrap();
out[i].update(n);
}
for line_r in buf.lines() {
let line = line_r.unwrap();
let tokens : Vec<&str> = line.split(" ").collect();
for i in 0..tokens.len() {
let n_r = tokens[i].trim().parse::<f64>();
if n_r.is_err() {
println!("Warning {} in line {} is not a number. Skipping", line, out[i].count);
continue;
}
let n = n_r.unwrap();
out[i].update(n);
}
}
for o in out.iter_mut() { //&mut out {
o.finalize();
}
out
}
// usage prints a short message describing the usage of the function
fn usage() {
println!("usage: stats <filename>");
}
// Output keeps track of the per column statistics
#[derive(Default)]
struct Output {
count: i64,
min: f64,
max: f64,
mean: f64,
median: f64,
sd: f64,
qk: f64,
mk: f64,
med: stats::Median,
}
impl Output {
fn new() -> Output {
Output { count: 0, min: f64::MAX, max: -f64::MAX, mean: 0.0,
median: 0.0, sd: 0.0, qk: 0.0, mk: 0.0, med: stats::Median::new()}
}
<|fim▁hole|> // update median
self.med.update(stats::FloatVal::new(v));
// update variance
let k: f64 = self.count as f64;
self.qk += (k - 1.0) * (v - self.mk) * (v - self.mk) / k;
self.mk += (v - self.mk) / k;
// update min, max, and mean
self.mean += v;
self.min = self.min.min(v);
self.max = self.max.max(v);
}
fn finalize(&mut self) {
let k: f64 = self.count as f64;
self.sd = (self.qk/(k-1.0)).sqrt();
self.mean /= k;
self.median = self.med.get();
}
}<|fim▁end|>
|
fn update(&mut self, v: f64) {
self.count += 1;
|
<|file_name|>sites.py<|end_file_name|><|fim▁begin|>class Gadgets(object):
"""
A Gadgets object providing managing of various gadgets for display on analytics dashboard.
Gadgets are registered with the Gadgets using the register() method.
"""
def __init__(self):
self._registry = {} # gadget hash -> gadget object.<|fim▁hole|> def get_gadgets(self):
return self._registry.values()
def register(self, gadget):
"""
Registers a gadget object.
If a gadget is already registered, this will raise AlreadyRegistered.
"""
self._registry[gadget.id] = gadget
gadgets = Gadgets()<|fim▁end|>
|
def get_gadget(self, id):
return self._registry[id]
|
<|file_name|>CONFIG.py<|end_file_name|><|fim▁begin|>"""
Configuration of 'memos' Flask app.
Edit to fit development or deployment environment.
"""
# import random
# localhost
# PORT = 5000
# DEBUG = True
# MONGO_PORT = 27017
# ix.cs.uoregon.edu
PORT = 7420 # random.randint(5000, 8000)<|fim▁hole|># both
MONGO_URL = "mongodb://memos_user:peach-cobbler@localhost:4152/memos"
# MONGO_USER = 'memos_user'
# MONGO_PW = 'peach-cobbler'
# MONGO_DB = 'memos'
# MONGO_URL = 'mongodb://{$MONGO_USER}:{$MONGO_PW}@localhost:{$MONGO_PORT}/{$MONGO_DB}'<|fim▁end|>
|
MONGO_PORT = 4152
DEBUG = False # Because it's unsafe to run outside localhost
|
<|file_name|>prog.py<|end_file_name|><|fim▁begin|>__author__ = 'Filushin_DV'
import generators
import tables
import profiler
from faker import Factory
date_start = '01/01/2015'
date_end = '06/06/2015'
citys = []
faker = Factory.create()
for i in range(100):
citys.append(faker.city())
valid_parents = (10, 2, 3)
field_list = []
field_list.append(tables.GenField('id', 'int', 1, 1000))
field_list.append(tables.GenField('id_parent', 'int', 1, 1, *valid_parents))
field_list.append(tables.GenField('name', 'str', 1, 5, *citys))
field_list.append(tables.GenField('is_delete', 'int', 0, 1))
field_list.append(tables.GenField('added', 'date', '01/01/2015', '06/06/2015'))
field_list.append(tables.GenField('edited', 'datetime', '01/01/2015', '06/06/2015'))
table = tables.GenTable('city', 5, *field_list)
with profiler.Profiler() as p:
gen = generators.SqlGenerator(table)
gen.generate_sql()
#gen.save_script('file.sql')
for line in gen.get_sql():
print(line)
<|fim▁hole|><|fim▁end|>
|
#print (string.punctuation)
|
<|file_name|>lib.py<|end_file_name|><|fim▁begin|>import sys
import os
import re
def human_size_to_byte(number):
"""
Convert number of these units to bytes, ignore case:
b : 512
kB : 1000
K : 1024
mB : 1000*1000
m : 1024*1024
MB : 1000*1000
M : 1024*1024
GB : 1000*1000*1000
G : 1024*1024*1024
TB : 1000*1000*1000*1000
T : 1024*1024*1024*1024
PB : 1000*1000*1000*1000*1000
P : 1024*1024*1024*1024*1024
EB : 1000*1000*1000*1000*1000*1000
E : 1024*1024*1024*1024*1024*1024
ZB : 1000*1000*1000*1000*1000*1000*1000
Z : 1024*1024*1024*1024*1024*1024*1024
YB : 1000*1000*1000*1000*1000*1000*1000*1000
Y : 1024*1024*1024*1024*1024*1024*1024*1024
number is of one of these forms:
123, 123b, 123M, 1G
"""
mapping = {
'b' : 512 ,
'kb' : 1000,
'k' : 1024,
'mb' : 1000**2,
'm' : 1024**2,
'gb' : 1000**3,
'g' : 1024**3,
'tb' : 1000**4,
't' : 1024**4,
'pb' : 1000**5,
'p' : 1024**5,
'eb' : 1000**6,
'e' : 1024**6,
'zb' : 1000**7,
'z' : 1024**7,
'yb' : 1000**8,
'y' : 1024**8,
}
unit = re.sub('^[0-9]+', '', number)
if unit:
unit = unit.lower()
assert unit in mapping.keys(), "wrong unit %s " % unit
amount = int(number[:-len(unit)])
return mapping[unit] * amount
else:
return int(number)
def correct_offset(file):
"""Due to Python cache issue, the real file offset of the
underlying file descriptor may differ, this function can correct
it.
"""
cur = file.seek(0, 1)
file.seek(0, 2)
file.seek(cur)
def open_file(file):
if file == '-':
return os.fdopen(sys.stdin.fileno(), 'rb')
else:
return open(file, 'rb')
class Locator:
"""Search from the end of the file backward, locate the starting
offset of the specified amount, measured by line, or by byte.
"""
def __init__(self, ifile, mode, amount, bs=8192):
"""mode can be 'lines' or 'bytes'"""
assert ifile.seekable(), "input file is not seekable"
self.orig_pos = ifile.seek(0, 1)
self.ifile = ifile
self.mode = mode
self.amount = amount
self.bs = bs
def find_line(self, ifile, chunk, amount):
""" Find if data chunk contains 'amount' number of lines.
Return value: (stat, pos, remaining-amount). If stat is True,
pos is the result, otherwise pos is not used, remaining-amount
is for the next run.
"""
count = chunk.count(b'\n')
if count <= amount:
amount -= count
return False, 0, amount
else: # found
pos = -1
for i in range(count - amount):
pos = chunk.index(b'\n', pos+1)
pos += 1
diff = len(chunk) - pos
pos = ifile.seek(-diff, 1)
return True, pos, 0
def find_byte(self, ifile, chunk, amount):
""" Find if data chunk contains 'amount' number of bytes.
Return value: (stat, pos, remaining-amount). If stat is True,
pos is the result, otherwise pos is not used, remaining-amount
is for the next run.
"""
length = len(chunk)
if length < amount:
amount -= length
return False, 0, amount
else: # found
pos = ifile.seek(-amount, 1)
return True, pos, 0
def find(self, ifile, offset, size, amount):
"""Read 'size' bytes starting from offset to find.
Return value: (stat, pos, remaining-amount). If stat is True,
pos is the result, otherwise pos is not used, remaining-amount
is for the next run.
"""
try:
pos = ifile.seek(offset)
except OSError:
assert False, "unkown file seeking failure"
chunk = ifile.read(size)
if self.mode == 'lines':
return self.find_line(ifile, chunk, amount)
else:
return self.find_byte(ifile, chunk, amount)
def run(self):
"""Find the offset of the last 'amount' lines"""
ifile = self.ifile
amount = self.amount
orig_pos = self.orig_pos
end = ifile.seek(0, 2) # jump to the end
# nothing to process, return the original position
total = end - orig_pos
if total <= amount:
correct_offset(ifile)
return orig_pos
bs = self.bs
# process the last block
remaining = total % bs
offset = end - remaining
stat, pos, amount = self.find(ifile, offset, remaining, amount)
while not stat and offset != orig_pos:
offset -= bs
stat, pos, amount = self.find(ifile, offset, bs, amount)
ifile.seek(self.orig_pos)
correct_offset(ifile)
return pos
class Buffer:
def __init__(self, amount):
self.min = amount
self.total = 0
self.data = []
def push(self, pair):
self.data.append(pair)
self.total += pair[0]
def pop(self):
pair = self.data.pop(0)
self.total -= pair[0]
return pair
def cut(self):
"""Pop as many pairs off the head of the self.data as
self.is_ready() is True, return a combined result.
"""
count = 0
data = b''
while self.is_ready():
x, y = self.pop()
count += x
data += y
return count, data
def is_satisfied(self):
"""The minimum amount is satisfied"""
return self.total >= self.min
def is_ready(self):
"""The buffer is ready to pop"""
return self.total - self.data[0][0] >= self.min
class HeadWorkerSL:
"""Seekable, line mode"""
def __init__(self, ifile, ofile, amount, bs=None):
self.ifile = ifile
self.ofile = ofile
self.amount = amount
self.bs = bs or 8192
def read(self):
return self.ifile.read(self.bs)
def transform(self, data):
return data.count(b'\n')
def is_last(self, count):
return count >= self.amount
def action(self, data, count):
self.ofile.write(data)
self.amount -= count
def handle_last(self, data):
pos = -1
for i in range(self.amount):
pos = data.index(b'\n', pos+1)
pos += 1
self.ofile.write(data[:pos])
over_read = len(data) - pos
try:
self.ifile.seek(-over_read, 1)
except Exception:
pass
def run(self):
while self.amount:
data = self.read()
if not data:
break
count = self.transform(data)
if self.is_last(count):
self.handle_last(data)
break
else:
self.action(data, count)
class HeadWorkerSB(HeadWorkerSL):
"""Seekable, byte mode"""
def transform(self, data):
return len(data)
def handle_last(self, data):
self.ofile.write(data[:self.amount])
over_read = len(data) - self.amount
try:
self.ifile.seek(-over_read, 1)
except Exception:
pass
class HeadWorkerTL(HeadWorkerSL):
"""Terminal, line mode"""
def read(self):
return self.ifile.readline()
def action(self, data, count):
self.ofile.write(data)
self.amount -= 1
self.ofile.flush()
def handle_last(self, data):
self.ofile.write(data)
self.ofile.flush()
class HeadWorkerTB(HeadWorkerSB):
"""Terminal, byte mode"""
def read(self):
return self.ifile.readline()
class HeadWorkerULIT(HeadWorkerSL):
"""Unseekable, line mode ignore tail"""
def __init__(self, ifile, ofile, amount, bs=None):
self.ifile = ifile
self.ofile = ofile
self.amount = amount
self.bs = bs or 8192
def read(self):
return self.ifile.read(self.bs)
def transform(self, data):
return data.count(b'\n')
def fill(self):
"""Fill up the buffer with content from self.ifile"""
amount = self.amount
buffer = Buffer(amount)
while True:
data = self.read()
if not data:
break
count = self.transform(data)
buffer.push((count, data))
if buffer.is_satisfied():
break
return buffer
def step(self, buffer):
"""Read and process the self.ifile step by step,
return False if nothing left in self.ifile.
"""
data = self.read()
if not data:
return False
count = self.transform(data)
buffer.push((count, data))
if buffer.is_ready():
x, data = buffer.cut()
self.proc(data)
return True
def proc(self, data):
self.ofile.write(data)
self.ofile.flush()
def handle_last(self, buffer):
while True:
x, data = buffer.pop()
if buffer.is_satisfied():
self.proc(data)
else:
diff = buffer.min - buffer.total
lines = data.splitlines(keepends=True)
self.ofile.writelines(lines[:-diff])
break
self.ofile.flush()
def run(self):
buffer = self.fill()
if buffer.is_satisfied():
while self.step(buffer):
pass
self.handle_last(buffer)
class HeadWorkerTLIT(HeadWorkerULIT):
"""Terminal, line mode ignore tail"""
def read(self):
return self.ifile.readline()
class HeadWorkerUBIT(HeadWorkerULIT):
"""Unseekable, byte mode ignore tail"""
def transform(self, data):
return len(data)
def handle_last(self, buffer):
while True:
x, data = buffer.pop()
if buffer.is_satisfied():
self.ofile.write(data)
else:
diff = buffer.min - buffer.total
self.ofile.write(data[:-diff])
break
self.ofile.flush()
class HeadWorkerTBIT(HeadWorkerUBIT):
"""Terminal, byte mode ignore tail"""
def read(self):
return self.ifile.readline()
class Mixin:
def copy_to_end(self):
while True:
chunk = self.read()
if not chunk:
break
self.ofile.write(chunk)
class TailWorkerSLIH(HeadWorkerSL, Mixin):
"""Seekable, line mode, ignore head"""
def __init__(self, ifile, ofile, amount, bs=None):
super(TailWorkerSLIH, self).__init__(ifile, ofile, amount, bs)
if amount > 0:
self.amount -= 1
def action(self, data, count):
self.amount -= count
def handle_last(self, data):
pos = -1
for i in range(self.amount):
pos = data.index(b'\n', pos+1)
pos += 1
self.ofile.write(data[pos:])
self.copy_to_end()
class TailWorkerSBIH(TailWorkerSLIH):
"""Seekable, byte mode, ignore head"""
def transform(self, data):
return len(data)
def handle_last(self, data):
self.ofile.write(data[self.amount:])
self.copy_to_end()
class TailWorkerSB(TailWorkerSLIH):
def __init__(self, ifile, ofile, bs=None):
self.ifile = ifile
self.ofile = ofile
self.bs = bs or 8192
def run(self):
self.copy_to_end()
class TailWorkerULIH(HeadWorkerULIT, Mixin):
"""Unseekable, line mode ignore head"""
def proc(self, data):
"""Just ignore the data"""
def handle_last(self, buffer):
while True:
x, data = buffer.pop()
if not buffer.is_satisfied():
diff = buffer.min - buffer.total
self.split_and_proc(data, diff)
for x, data in buffer.data:
self.ofile.write(data)
break
def split_and_proc(self, data, diff):
lines = data.splitlines(keepends=True)
self.ofile.writelines(lines[-diff:])
class TailWorkerUBIH(TailWorkerULIH):
"""Unseekable, byte mode ignore head"""
def read(self):
return self.ifile.read(self.bs)
def transform(self, data):
return len(data)
def split_and_proc(self, data, diff):
self.ofile.write(data[-diff:])
class TailWorkerTLIH(TailWorkerULIH):
"""Terminal, line mode ignore head"""
def read(self):
return self.ifile.readline()
class TailWorkerTBIH(TailWorkerTLIH):
"""Terminal, byte mode ignore head"""
def transform(self, data):
return len(data)
def split_and_proc(self, data, diff):
self.ofile.write(data[-diff:])
class TailWorkerTL(TailWorkerSLIH):
"""Terminal, line mode, ignore head"""
def read(self):
return self.ifile.readline()
def handle_last(self, data):
self.copy_to_end()
class TailWorkerTB(TailWorkerTL):
"""Terminal, byte mode, ignore head"""
def transform(self, data):
return len(data)
def handle_last(self, data):
self.ofile.write(data[self.amount:])
self.copy_to_end()
class GrepNameDetermined(Exception): pass
class GrepStatusDetermined(Exception): pass
class GrepWorker:
# VT100 color code
c_fname = b'\x1b[35m' # magenta
c_sep = b'\x1b[36m' # cyan
c_lnum = b'\x1b[32m' # green
c_match = b'\x1b[31m\x1b[1m' # bold red
c_off = b'\x1b[0m' # turn off color
sep_line = b'--\n'
c_sep_line = c_sep + b'--' + c_off + b'\n'
def __init__(self, pattern, options, ifile, ofile, bs=None):
self.pattern = pattern
self.options = options
self.ifile = ifile
self.ofile = ofile
self.bs = bs or 8192
self.nr = 0 # number of records
self.fname = self.make_fname(ifile.name)
self.status = False
# Invert the sense of matching
if ('invert' in options and 'file_match' not in options
and 'count' not in options):
self.on_match, self.on_not_match = self.on_not_match, self.on_match
# set on_match method for -q option
if 'quiet' in options:
self.on_match = self.quiet_on_match
# set reader for tty input file
if ifile.isatty():
self.read = self.read_tty
self.write = self.write_tty
# setup color output
color = options['color']
if color == 'always' or self.ofile.isatty() and color == 'auto':
self.sep_line = self.c_sep_line
self.make_fname_str = self.make_color_fname_str
self.make_lnum_str = self.make_color_lnum_str
self.make_matcher = self.make_color_matcher
self.matcher = self.make_matcher(options)
def insert_line_number(self, lines, num, sep=b':'):
"""Insert line number to the head of each line"""
num = str(num).encode()
num_str = self.make_lnum_str(num, sep)
return (b'%s%s' % (num_str, line) for line in lines)
def insert_file_name(self, lines, fname, sep=b':'):
"""Insert file name to the head of each line"""
fname_str = self.make_fname_str(fname, sep)
return (b'%s%s' % (fname_str, line) for line in lines)
def make_lnum_str(self, num, sep):
return num + sep
def make_fname_str(self, fname, sep):
return fname + sep
def make_color_lnum_str(self, num, sep):
return self.c_lnum + num + self.c_sep + sep + self.c_off
def make_color_fname_str(self, fname, sep):
return self.c_fname + fname + self.c_sep + sep + self.c_off
def quiet_on_match(self, *args, **kargs):
raise GrepStatusDetermined
def read(self):
"""Return an enumerate object with line number"""
lines = self.ifile.readlines(self.bs)
if not lines:
return None
count = len(lines)
res = enumerate(lines, self.nr + 1)
self.nr += count
return res
def read_tty(self):
"""Read the terminal, line by line"""
line = self.ifile.readline()
if not line:
return None
self.nr += 1
return [(self.nr, line)]
def make_normal_matcher(self, options):
# handle -w option, match word boundary
pat = self.pattern
if 'word_regexp' in self.options:
pat = r'\b%s\b' % pat
# handle -i option, ignore case
flags = 0
if 'ignore_case' in self.options:
flags |= re.IGNORECASE
pat = re.compile(pat.encode(), flags)
return pat
def make_matcher(self, options):
pat = self.make_normal_matcher(options)
class C:
def findall(self, line):
return pat.findall(line), line
return C()
def make_color_matcher(self, options):
pat = self.make_normal_matcher(options)
c_match = self.c_match
c_off = self.c_off
class C:
def findall(self, line):
matches = pat.findall(line)
if matches:
matches = [c_match + x + c_off for x in matches]
line = re.sub(pat, self.apply_color, line)
return matches, line
def apply_color(self, m):
return c_match + m.group() + c_off
return C()
def make_fname(self, name):
"""Make a file name for output"""
if name == 0:
name = '(standard input)'.encode()
else:
name = str(name).encode()
return name
def format_output(self, lines, lnum, options, sep=b':'):
"""Format lines for output"""
# handle -n option, show line number
if 'line_number' in options:
lines = self.insert_line_number(lines, lnum, sep)
# insert file name if necessary
if options['with_filename']:
lines = self.insert_file_name(lines, self.fname, sep)
return lines
def write(self, lines):
self.ofile.writelines(lines)
def write_tty(self, lines):
"""Write to terminal, flush after every write"""
self.ofile.writelines(lines)
self.ofile.flush()
def on_match(self, matches, line, lnum):
self.status = True
# handle -o option, show only the matched part
if 'only_matching' in self.options:
lines = (x + b'\n' for x in matches)
else:
lines = [line]
lines = self.format_output(lines, lnum, self.options)
self.write(lines)
def on_not_match(self, *args, **kargs):
return None
def run(self):
while True:
lines_data = self.read()
if not lines_data:
break
for n, line in lines_data:
matches, line = self.matcher.findall(line)
if matches:
self.on_match(matches, line, n)
else:
self.on_not_match(matches, line, n)
return self.status
class GrepWorkerAgg(GrepWorker):
def __init__(self, *args, **kargs):
super(GrepWorkerAgg, self).__init__(*args, **kargs)
self.match_count = 0
def format_output(self, lines, options):
"""Format lines for output"""
# insert file name if necessary
if options['with_filename']:
lines = self.insert_file_name(lines, self.fname)
return lines
def on_match(self, matches, line, lnum):
self.status = True
self.match_count += 1
def run(self):
status = super(GrepWorkerAgg, self).run()
lines = [str(self.match_count).encode() + b'\n']
lines = self.format_output(lines, self.options)
self.write(lines)
return status
class GrepWorkerFileName(GrepWorker):
def on_match(self, matches, line, lnum):
raise GrepNameDetermined
def run(self):
try:
super(GrepWorkerFileName, self).run()
status = False
except GrepNameDetermined:
self.write([self.fname + b'\n'])
status = True
return status
class GrepWorkerContext(GrepWorker):
def __init__(self, *args, **kargs):
super(GrepWorkerContext, self).__init__(*args, **kargs)
self.before = self.options.get('before', 0)
self.after = self.options.get('after', 0)
self.b_buf = []
self.a_counter = 0
self.last_written_lnum = 0
def write_separator(self, lnum):
last_lnum = self.last_written_lnum
first_lnum = self.b_buf[0][0] if self.b_buf else lnum
if last_lnum and first_lnum - last_lnum > 1:
self.write([self.sep_line])
def on_match(self, matches, line, lnum):
# the 'before' buffer may contain more lines than needed,
# truncate it before writing the separator in order not
# to interfere the line number calculation.
if self.before:
self.b_buf = self.b_buf[-self.before:]
else:
self.b_buf.clear()
self.write_separator(lnum)
self.write_b_buffer()
super(GrepWorkerContext, self).on_match(matches, line, lnum)
self.last_written_lnum = lnum
self.reset_a_counter()
def on_not_match(self, matches, line, lnum):
if self.a_counter:
if 'only_matching' not in self.options:
lines = self.format_output([line], lnum, self.options, b'-')
self.write(lines)
self.last_written_lnum = lnum
self.a_counter -= 1
else:
self.b_buf.append((lnum, line))
def reset_a_counter(self):
self.a_counter = self.after
def write_b_buffer(self):
"""Write out the 'before' buffer"""
if not self.b_buf:
return
# write only when -o option is not presented,
if 'only_matching' not in self.options:
for lnum, line in self.b_buf:
lines = self.format_output([line], lnum, self.options, b'-')
self.write(lines)
self.last_written_lnum = self.b_buf[-1][0]
self.b_buf.clear()
def run(self):
bs = self.before
while True:
self.b_buf = self.b_buf[-bs:]<|fim▁hole|> lines_data = self.read()
if not lines_data:
break
for n, line in lines_data:
matches, line = self.matcher.findall(line)
if matches:
self.on_match(matches, line, n)
else:
self.on_not_match(matches, line, n)
return self.status
def recursive_walk(worker, names, pattern, options):
"""Process all regular files, descend into directories. When
the -q option is provided, the first match will trigger an
exception named GrepStatusDetermined."""
def processor(names, pattern, options, worker):
status_list = []
for name in names:
if os.path.isfile(name):
status = worker(name, pattern, options)
status_list.append(status)
elif os.path.isdir(name):
try:
sub_names = os.listdir(name)
except Exception as e:
print(str(e), file=sys.stderr)
status_list.append(False)
else:
sub_names = [os.path.join(name, x) for x in sub_names]
names.extend(sub_names)
return status_list
return walk(worker, names, pattern, options, processor)
def walk(worker, names, pattern, options, processor=None):
"""Each file shall be a regular file. When the -q option is
provided, the first match will trigger an exception named
GrepStatusDetermined."""
if not processor:
def processor(names, pattern, options, worker):
status_list = []
for name in names:
status = worker(name, pattern, options)
status_list.append(status)
return status_list
try:
status_list = processor(names, pattern, options, worker)
except GrepStatusDetermined:
status_list = [True]
if 'quiet' in options:
return any(status_list)
else:
return all(status_list)<|fim▁end|>
| |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>import { ImageCallback } from '@jimp/core';
<|fim▁hole|>}
export default function(): Blur;<|fim▁end|>
|
interface Blur {
blur(r: number, cb?: ImageCallback<this>): this;
|
<|file_name|>offset_commit_request_test.go<|end_file_name|><|fim▁begin|>package sarama
import "testing"
var (
offsetCommitRequestNoBlocksV0 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x00, 0x00}
offsetCommitRequestNoBlocksV1 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x11, 0x22,
0x00, 0x04, 'c', 'o', 'n', 's',
0x00, 0x00, 0x00, 0x00}
offsetCommitRequestNoBlocksV2 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x11, 0x22,
0x00, 0x04, 'c', 'o', 'n', 's',
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x33,
0x00, 0x00, 0x00, 0x00}
offsetCommitRequestOneBlockV0 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x00, 0x01,
0x00, 0x05, 't', 'o', 'p', 'i', 'c',
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x52, 0x21,
0x00, 0x00, 0x00, 0x00, 0xDE, 0xAD, 0xBE, 0xEF,
0x00, 0x08, 'm', 'e', 't', 'a', 'd', 'a', 't', 'a'}
offsetCommitRequestOneBlockV1 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x11, 0x22,
0x00, 0x04, 'c', 'o', 'n', 's',
0x00, 0x00, 0x00, 0x01,
0x00, 0x05, 't', 'o', 'p', 'i', 'c',
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x52, 0x21,
0x00, 0x00, 0x00, 0x00, 0xDE, 0xAD, 0xBE, 0xEF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x00, 0x08, 'm', 'e', 't', 'a', 'd', 'a', 't', 'a'}
offsetCommitRequestOneBlockV2 = []byte{
0x00, 0x06, 'f', 'o', 'o', 'b', 'a', 'r',
0x00, 0x00, 0x11, 0x22,
0x00, 0x04, 'c', 'o', 'n', 's',
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x33,
0x00, 0x00, 0x00, 0x01,
0x00, 0x05, 't', 'o', 'p', 'i', 'c',
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x52, 0x21,
0x00, 0x00, 0x00, 0x00, 0xDE, 0xAD, 0xBE, 0xEF,
0x00, 0x08, 'm', 'e', 't', 'a', 'd', 'a', 't', 'a'}
)
func TestOffsetCommitRequestV0(t *testing.T) {
request := new(OffsetCommitRequest)
request.Version = 0
request.ConsumerGroup = "foobar"
testRequest(t, "no blocks v0", request, offsetCommitRequestNoBlocksV0)
request.AddBlock("topic", 0x5221, 0xDEADBEEF, 0, "metadata")
testRequest(t, "one block v0", request, offsetCommitRequestOneBlockV0)
}
func TestOffsetCommitRequestV1(t *testing.T) {
request := new(OffsetCommitRequest)
request.ConsumerGroup = "foobar"
request.ConsumerID = "cons"
request.ConsumerGroupGeneration = 0x1122
request.Version = 1
testRequest(t, "no blocks v1", request, offsetCommitRequestNoBlocksV1)
<|fim▁hole|> request.AddBlock("topic", 0x5221, 0xDEADBEEF, ReceiveTime, "metadata")
testRequest(t, "one block v1", request, offsetCommitRequestOneBlockV1)
}
func TestOffsetCommitRequestV2(t *testing.T) {
request := new(OffsetCommitRequest)
request.ConsumerGroup = "foobar"
request.ConsumerID = "cons"
request.ConsumerGroupGeneration = 0x1122
request.RetentionTime = 0x4433
request.Version = 2
testRequest(t, "no blocks v2", request, offsetCommitRequestNoBlocksV2)
request.AddBlock("topic", 0x5221, 0xDEADBEEF, 0, "metadata")
testRequest(t, "one block v2", request, offsetCommitRequestOneBlockV2)
}<|fim▁end|>
| |
<|file_name|>_text.js<|end_file_name|><|fim▁begin|>/* global Fae, SimpleMDE, toolbarBuiltInButtons */
/**
* Fae form text
* @namespace form.text
* @memberof form
*/
Fae.form.text = {
init: function() {
this.overrideMarkdownDefaults();
this.initMarkdown();
},
/**
* Override SimpleMDE's preference for font-awesome icons and use a modal for the guide
* @see {@link modals.markdownModal}
*/
overrideMarkdownDefaults: function() {
toolbarBuiltInButtons['bold'].className = 'icon-bold';
toolbarBuiltInButtons['italic'].className = 'icon-italic';
toolbarBuiltInButtons['heading'].className = 'icon-font';
toolbarBuiltInButtons['code'].className = 'icon-code';
toolbarBuiltInButtons['unordered-list'].className = 'icon-list-ul';
toolbarBuiltInButtons['ordered-list'].className = 'icon-list-ol';
toolbarBuiltInButtons['link'].className = 'icon-link';
toolbarBuiltInButtons['image'].className = 'icon-image';
toolbarBuiltInButtons['quote'].className = 'icon-quote';
toolbarBuiltInButtons['fullscreen'].className = 'icon-fullscreen no-disable no-mobile';
toolbarBuiltInButtons['horizontal-rule'].className = 'icon-minus';
toolbarBuiltInButtons['preview'].className = 'icon-eye no-disable';
toolbarBuiltInButtons['side-by-side'].className = 'icon-columns no-disable no-mobile';
toolbarBuiltInButtons['guide'].className = 'icon-question';
// Override SimpleMDE's default guide and use a homegrown modal
toolbarBuiltInButtons['guide'].action = Fae.modals.markdownModal;
},
/**
* Find all markdown fields and initialize them with a markdown GUI
* @has_test {features/form_helpers/fae_input_spec.rb}
*/
initMarkdown: function() {
$('.js-markdown-editor:not(.mde-enabled)').each(function() {
var $this = $(this);
var editor = new SimpleMDE({
element: this,
autoDownloadFontAwesome: false,
status: false,
spellChecker: false,
hideIcons: ['image', 'side-by-side', 'fullscreen', 'preview']
});
// Disable tabbing within editor
editor.codemirror.options.extraKeys['Tab'] = false;
editor.codemirror.options.extraKeys['Shift-Tab'] = false;
$this.addClass('mde-enabled');
// code mirror events to hook into current form element functions
editor.codemirror.on('change', function(){
// updates the original textarea's value for JS validations
$this.val(editor.value());
// update length counter
Fae.form.validator.length_counter.updateCounter($this);
});
editor.codemirror.on('focus', function(){<|fim▁hole|> });
editor.codemirror.on('blur', function(){
// trigger blur on the original textarea to trigger JS validations
$this.blur();
$this.parent().removeClass('mde-focus');
});
});
}
};<|fim▁end|>
|
$this.parent().addClass('mde-focus');
|
<|file_name|>eo.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2012, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'image', 'eo', {
alertUrl: 'Bonvolu tajpi la retadreson de la bildo',
alt: 'Anstataŭiga Teksto',
border: 'Bordero',
btnUpload: 'Sendu al Servilo',
button2Img: 'Ĉu vi volas transformi la selektitan bildbutonon en simplan bildon?',
hSpace: 'Horizontala Spaco',
img2Button: 'Ĉu vi volas transformi la selektitan bildon en bildbutonon?',
infoTab: 'Informoj pri Bildo',
linkTab: 'Ligilo',
lockRatio: 'Konservi Proporcion',
menu: 'Atributoj de Bildo',
resetSize: 'Origina Grando',
title: 'Atributoj de Bildo',
titleButton: 'Bildbutonaj Atributoj',
upload: 'Alŝuti',
urlMissing: 'La fontretadreso de la bildo mankas.',
vSpace: 'Vertikala Spaco',<|fim▁hole|> validateVSpace: 'La vertikala spaco devas esti entjera nombro.'
});<|fim▁end|>
|
validateBorder: 'La bordero devas esti entjera nombro.',
validateHSpace: 'La horizontala spaco devas esti entjera nombro.',
|
<|file_name|>edit.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
/**
* Introduce the vendorAppApp.customer.list.edit module
* and configure it.
*
* @requires 'ui.router',
* @requires 'ngMaterial',
* @requires vendorAppApp.mongooseError
* @requires vendorAppApp.customer.service
*/
angular
.module('vendorAppApp.customer.list.edit', [
'ui.router',
'ngMaterial',
'vendorAppApp.mongooseError',
'vendorAppApp.customer.service'
])
.config(configureCustomerListEdit);
<|fim▁hole|>
/**
* Route configuration function configuring the passed $stateProvider.
* Register the customer.list.edit state with the edit template
* paired with the CustomerEditController as 'edit' for the
* '[email protected]' view.
* 'customer' is resolved as the customer with the id found in
* the state parameters.
*
* @param {$stateProvider} $stateProvider - The state provider to configure
*/
function configureCustomerListEdit($stateProvider) {
// The edit state configuration.
var editState = {
name: 'customer.list.edit',
parent: 'customer.list',
url: '/edit/:id',
authenticate: true,
role: 'user',
onEnter: onEnterCustomerListEdit,
views: {
'[email protected]': {
templateUrl: 'app/customer/list/edit/edit.html',
controller: 'CustomerEditController',
controllerAs: 'edit',
resolve: {customer: resolveCustomerFromArray}
}
}
};
$stateProvider.state(editState);
}
// inject onCustomerListEditEnter dependencies
onEnterCustomerListEdit.$inject = ['$timeout', 'ToggleComponent'];
/**
* Executed when entering the customer.list.detail state. Open the component
* registered with the component id 'customer.detailView'.
*
* @params {$timeout} $timeout - The $timeout service to wait for view initialization
* @params {ToggleComponent} ToggleComponent - The service to toggle the detail view
*/
function onEnterCustomerListEdit($timeout, ToggleComponent) {
$timeout(showDetails, 0, false);
function showDetails() {
ToggleComponent('customer.detailView').open();
}
}
// inject resolveCustomerDetailRoute dependencies
resolveCustomerFromArray.$inject = ['customers', '$stateParams', '_'];
/**
* Resolve dependencies for the customer.list.edit state. Get the customer
* from the injected Array of customers by using the '_id' property.
*
* @params {Array} customers - The array of customers
* @params {Object} $stateParams - The $stateParams to read the customer id from
* @params {Object} _ - The lodash service to find the requested customer
* @returns {Object|null} The customer whose value of the _id property equals $stateParams._id
*/
function resolveCustomerFromArray(customers, $stateParams, _) {
// return Customer.get({id: $stateParams.id}).$promise;
return _.find(customers, {'_id': $stateParams.id});
}
})();<|fim▁end|>
|
// inject configCustomerListEdit dependencies
configureCustomerListEdit.$inject = ['$stateProvider'];
|
<|file_name|>cfServerGroupDetailsGetter.ts<|end_file_name|><|fim▁begin|>import { IPromise } from 'angular';
import { isEmpty } from 'lodash';
import { Observable } from 'rxjs';
import { IServerGroupDetailsProps, ServerGroupReader } from '@spinnaker/core';
import { ICloudFoundryLoadBalancer, ICloudFoundryServerGroup } from 'cloudfoundry/domain';
function extractServerGroupSummary(props: IServerGroupDetailsProps): IPromise<ICloudFoundryServerGroup> {
const { app, serverGroup } = props;
return app.ready().then(() => {
let summary: ICloudFoundryServerGroup = app.serverGroups.data.find((toCheck: ICloudFoundryServerGroup) => {
return (
toCheck.name === serverGroup.name &&
toCheck.account === serverGroup.accountId &&
toCheck.region === serverGroup.region
);
});
if (!summary) {
app.loadBalancers.data.some((loadBalancer: ICloudFoundryLoadBalancer) => {
if (loadBalancer.account === serverGroup.accountId && loadBalancer.region === serverGroup.region) {
return loadBalancer.serverGroups.some(possibleServerGroup => {
if (possibleServerGroup.name === serverGroup.name) {
summary = possibleServerGroup;
return true;
}
return false;
});
}
return false;
});
}<|fim▁hole|> });
}
export function cfServerGroupDetailsGetter(
props: IServerGroupDetailsProps,
autoClose: () => void,
): Observable<ICloudFoundryServerGroup> {
const { app, serverGroup: serverGroupInfo } = props;
return new Observable<ICloudFoundryServerGroup>(observer => {
extractServerGroupSummary(props).then(summary => {
ServerGroupReader.getServerGroup(
app.name,
serverGroupInfo.accountId,
serverGroupInfo.region,
serverGroupInfo.name,
).then((serverGroup: ICloudFoundryServerGroup) => {
// it's possible the summary was not found because the clusters are still loading
Object.assign(serverGroup, summary, { account: serverGroupInfo.accountId });
if (!isEmpty(serverGroup)) {
observer.next(serverGroup);
} else {
autoClose();
}
}, autoClose);
}, autoClose);
});
}<|fim▁end|>
|
return summary;
|
<|file_name|>NewsPage.js<|end_file_name|><|fim▁begin|>import Page from '../Page';
import './news.scss';
import RoomList from "../parts/RoomList";
export default class NewsPage extends Page {
indexAction() {
this.headerTitle = "新着・おすすめ";
var $switchPanel = $(`
<div class="switch-panel">
<div class="switch-btn selected new">
<span class="title">新着</span>
<span class="count">--</span>
<span class="ken">件</span>
</div>
<div class="switch-btn pickup">
<span class="title">おすすめ</span>
<span class="count">--</span>
<span class="ken">件</span>
</div>
</div>
`);
this.$main.append($switchPanel);
var $newCount = $switchPanel.find(".new .count");
var $pickupCount = $switchPanel.find(".pickup .count");
getPickupCount()
.then( count => $pickupCount.html(count) );
RoomList.findAll({new:1}, $newCount)
.then( $roomList => {
this.$contents.append($roomList);
});
var $pickupBtn = $switchPanel.find(".pickup");
var $newBtn = $switchPanel.find(".new");
$pickupBtn.on("click", () => {
$newBtn.removeClass("selected");<|fim▁hole|> });
});
$newBtn.on("click", () => {
$pickupBtn.removeClass("selected");
$newBtn.addClass("selected");
RoomList.findAll({new:1})
.then( $roomList => {
this.$contents.append($roomList);
});
});
}
}
function getPickupCount() {
return global.APP.api.ietopia.room.count({pickup:1})
}<|fim▁end|>
|
$pickupBtn.addClass("selected");
RoomList.findAll({pickup:1})
.then( $roomList => {
this.$contents.append($roomList);
|
<|file_name|>env.py<|end_file_name|><|fim▁begin|>from abc import abstractmethod
import sys, abc
if sys.version_info >= (3, 4):
ABC = abc.ABC
else:
ABC = abc.ABCMeta('ABC', (), {})
import numpy as np
from enum import Enum<|fim▁hole|> class Terminate(Enum):
Null = 0
Fail = 1
Succ = 2
def __init__(self, args, enable_draw):
self.enable_draw = enable_draw
return<|fim▁end|>
|
class Env(ABC):
|
<|file_name|>ManejadorUbicar.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates<|fim▁hole|>package model;
import controller.GestorLocalidad;
import java.util.ArrayList;
import org.w3c.dom.Document;
/**
*
* @author soriagal
*/
public class ManejadorUbicar {
private WebUbicar webUbicar;
private GestorLocalidad gestorLocalidad = new GestorLocalidad();
private ArrayList idLocalidades;
private ArrayList documents; //object: document con el xml
private ArrayList localidades = new ArrayList(); //object: objeto localidad
public void pedirUbicacion () {
idLocalidades = gestorLocalidad.getLocalidades();
webUbicar = new WebUbicar();
documents = webUbicar.getDocuments(idLocalidades);
for (int i=0; i<documents.size(); i++) {
Localidad localidad;
LectorUbicacionXml lector = new LectorUbicacionXml((Document) documents.get(i));
localidad = lector.getLocalidad();
localidades.add(localidad);
}
}
public void guardarUbicacion () {
for (int i=0; i<localidades.size(); i++) {
Localidad localidad = (Localidad) localidades.get(i);
gestorLocalidad.guardarUbicacion(localidad);
}
}
}<|fim▁end|>
|
* and open the template in the editor.
*/
|
<|file_name|>analysis-form-view.spec.js<|end_file_name|><|fim▁begin|>var Backbone = require('backbone');
var camshaftReference = require('builder/data/camshaft-reference');
var areaOfInfluenceTemplate = require('builder/editor/layers/layer-content-views/analyses/analysis-form-models/area-of-influence-form.tpl');
var BaseAnalysisFormModel = require('builder/editor/layers/layer-content-views/analyses/analysis-form-models/base-analysis-form-model');
var AnalysisFormView = require('builder/editor/layers/layer-content-views/analyses/analysis-form-view');
var analyses = require('builder/data/analyses');
describe('editor/layers/layer-content-view/analyses/analysis-form-view', function () {
beforeEach(function () {
this.formModel = new BaseAnalysisFormModel({
id: 'a1',
type: 'buffer',
source: 'a0',
radius: '100'
}, {
analyses: analyses,
configModel: {},
layerDefinitionModel: {},
analysisSourceOptionsModel: {}
});
this.formModel.schema.source = {type: 'Text'};
this.formModel.schema.radius = {type: 'Number'};
spyOn(this.formModel, 'getTemplate').and.returnValue(areaOfInfluenceTemplate);
spyOn(this.formModel, 'getTemplateData').and.returnValue({parametersDataFields: 'radius'});
spyOn(this.formModel, 'setFormValidationErrors').and.callThrough();
spyOn(camshaftReference, 'validate');
this.view = new AnalysisFormView({
formModel: this.formModel,
configModel: {}
});
this.view.render();
});
it('should render with template and data from form model', function () {
expect(this.view.$el.html()).toContain('form');
expect(this.view.$el.html()).toContain('data-fields="radius"');
});
it('should not validate when view is rendered intially', function () {
expect(camshaftReference.validate).not.toHaveBeenCalled();
});
describe('when form changes with erroneous data', function () {
beforeEach(function () {
camshaftReference.validate.and.returnValue({radius: '42 is not the answer, you fool!'});
// simulate change
this.view._formView.setValue('radius', '42');
this.view._formView.trigger('change');
});
it('should show errors when validation fails', function (done) {
var self = this;
setTimeout(function () {
expect(camshaftReference.validate).toHaveBeenCalled();
expect(self.view.$el.html()).toContain('Error');
done();
}, 0);
});
it('should update form model anyway', function () {
expect(this.formModel.get('radius')).toEqual(42);
});
it('should set form validation errors on the model', function (done) {
var self = this;
setTimeout(function () {
expect(self.formModel.setFormValidationErrors).toHaveBeenCalled();
expect(self.formModel.setFormValidationErrors.calls.argsFor(0)[0]).toBeUndefined();
expect(self.formModel.setFormValidationErrors.calls.argsFor(1)[0]).toEqual({radius: jasmine.any(String)});
done();
}, 0);
});
describe('when validation passes', function () {
beforeEach(function () {
this.formModel.setFormValidationErrors.calls.reset();
camshaftReference.validate.and.returnValue(undefined);
// simulate change
this.view._formView.setValue('radius', '20');
this.view._formView.trigger('change');<|fim▁hole|> it('should remove form validation errors', function () {
expect(this.formModel.setFormValidationErrors.calls.argsFor(0)[0]).toBeUndefined();
expect(this.formModel.setFormValidationErrors.calls.argsFor(1)[0]).toBeUndefined();
});
it('should update model', function () {
expect(this.formModel.get('radius')).toEqual(20);
});
});
});
describe('when schema changes', function () {
beforeEach(function () {
this.prev$form = this.view.$('form');
this.formModel.trigger('changeSchema');
});
afterEach(function () {
this.prev$form = null;
});
it('should re-render the form', function () {
expect(this.view.$('form').length).toEqual(1);
expect(this.view.$('form')).not.toBe(this.prev$form);
});
});
describe('_onChangeAnalysisFormView and _showAnalysisFormErrors', function () {
it('_onChangeAnalysisFormView calls _showAnalysisFormErrors with the form view Id that was active when the function was called', function (done) {
var self = this;
spyOn(this.view, '_showAnalysisFormErrors');
var formId = this.view._formView.cid;
this.view._onChangeAnalysisFormView();
setTimeout(function () {
expect(self.view._showAnalysisFormErrors).toHaveBeenCalledWith(formId);
done();
}, 0);
});
it('_showAnalysisFormErrors should not commit the form if the current form is different from the one who called it', function () {
var _prevShowFn = this.view._showAnalysisFormErrors;
var self = this;
spyOn(this.view, '_showAnalysisFormErrors').and.callFake(function (formId) {
self.view._formView.cid = 'another-form-808';
_prevShowFn.call(this, formId);
});
spyOn(this.view, '_formView');
this.view._onChangeAnalysisFormView();
expect(this.view._formView).not.toHaveBeenCalled();
});
});
describe('when form is cleaned', function () {
beforeEach(function () {
spyOn(Backbone.Form.prototype, 'remove').and.callThrough();
this.view.clean();
});
it('should remove form when view is cleaned', function () {
expect(Backbone.Form.prototype.remove).toHaveBeenCalled();
});
});
it('should not have any leaks', function () {
expect(this.view).toHaveNoLeaks();
});
});<|fim▁end|>
|
});
|
<|file_name|>Test1.py<|end_file_name|><|fim▁begin|>import numpy as np
import cv2
import cv2.cv as cv
#im = cv2.imread('/Users/asafvaladarsky/Documents/img/Ad0010401.png')
im = cv2.imread('pic.png')
imgray = cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
invert = 255 - imgray
cv2.imwrite('/Users/asafvaladarsky/Documents/pic1.png', invert)
#ret,thresh = cv2.threshold(invert,0,0,0)
contours, hierarchy = cv2.findContours(invert,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
for i in range(0, len(contours)):
if (i % 2 == 0):
cnt = contours[i]
#mask = np.zeros(im2.shape,np.uint8)
#cv2.drawContours(mask,[cnt],0,255,-1)
x,y,w,h = cv2.boundingRect(cnt)
cv2.rectangle(invert,(x,y),(x+w,y+h),(0,255,0),1)
#cv2.drawContours(invert, contours, -1, (255,0,0), 1 )
cv2.imshow('image', invert)
0xFF & cv2.waitKey()
cv2.destroyAllWindows()
'''
gray = cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray,(5,5),0)
thresh = cv2.adaptiveThreshold(blur,255,1,1,11,2)
################# Now finding Contours ###################
contours0, hierarchy = cv2.findContours( im, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in contours0]
def update(levels):
vis = np.zeros((cvImg.height, cvImg.width, 3), np.uint8)
levels = levels - 3
cv2.drawContours( vis, contours, (-1, 3)[levels <= 0], (128,255,255),
3, cv2.CV_AA, hierarchy, abs(levels) )
cv2.imshow('contours', vis)
update(3)
cv2.createTrackbar( "levels+3", "contours", 3, 7, update )
cv2.imshow('image', img)
0xFF & cv2.waitKey()
cv2.destroyAllWindows()
'''
'''
contours,hierarchy = cv2.findContours(thresh,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
samples = np.empty((0,100))
responses = []
keys = [i for i in range(48,58)]
print len(contours)
for cnt in contours:
if cv2.contourArea(cnt)>50:
[x,y,w,h] = cv2.boundingRect(cnt)
<|fim▁hole|> if h>28:
cv2.rectangle(im,(x,y),(x+w,y+h),(0,0,255),2)
roi = thresh[y:y+h,x:x+w]
roismall = cv2.resize(roi,(10,10))
cv2.imshow('norm',im)
key = cv2.waitKey(0)
if key == 27:
sys.exit()
elif key in keys:
responses.append(int(chr(key)))
sample = roismall.reshape((1,100))
samples = np.append(samples,sample,0)
else:
print "boho"
responses = np.array(responses,np.float32)
responses = responses.reshape((responses.size,1))
print("training complete")
np.savetxt('generalsamples.data',samples)
np.savetxt('generalresponses.data',responses)
'''<|fim▁end|>
| |
<|file_name|>metrics_client_example_test.go<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package logging_test
import (
"context"
logging "cloud.google.com/go/logging/apiv2"
"google.golang.org/api/iterator"
loggingpb "google.golang.org/genproto/googleapis/logging/v2"
)
func ExampleNewMetricsClient() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
// TODO: Use client.
_ = c
}
func ExampleMetricsClient_ListLogMetrics() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &loggingpb.ListLogMetricsRequest{
// TODO: Fill request struct fields.
}
it := c.ListLogMetrics(ctx, req)
for {
resp, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
}
func ExampleMetricsClient_GetLogMetric() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &loggingpb.GetLogMetricRequest{
// TODO: Fill request struct fields.
}
resp, err := c.GetLogMetric(ctx, req)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
func ExampleMetricsClient_CreateLogMetric() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &loggingpb.CreateLogMetricRequest{
// TODO: Fill request struct fields.
}
resp, err := c.CreateLogMetric(ctx, req)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp<|fim▁hole|>func ExampleMetricsClient_UpdateLogMetric() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &loggingpb.UpdateLogMetricRequest{
// TODO: Fill request struct fields.
}
resp, err := c.UpdateLogMetric(ctx, req)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
func ExampleMetricsClient_DeleteLogMetric() {
ctx := context.Background()
c, err := logging.NewMetricsClient(ctx)
if err != nil {
// TODO: Handle error.
}
req := &loggingpb.DeleteLogMetricRequest{
// TODO: Fill request struct fields.
}
err = c.DeleteLogMetric(ctx, req)
if err != nil {
// TODO: Handle error.
}
}<|fim▁end|>
|
}
|
<|file_name|>nvd3LineChart.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.widgets')
.directive('wtNvd3LineChart', function ($filter) {
return {
restrict: 'A',
replace: true,
templateUrl: 'template/widgets/nvd3LineChart/nvd3LineChart.html',
scope: {
data: '=data',
showLegend: '@',
showTimeRange: '=?',
timeAxisFormat: '=?'
},
controller: function ($scope) {
var filter = $filter('date');
var numberFilter = $filter('number');
$scope.xAxisTickFormatFunction = function () {
return function (d) {
return filter(d, $scope.timeAxisFormat);
};
};
$scope.yAxisTickFormatFunction = function () {
return function (d) {
if (d > 999) {
var value;
var scale;
if (d < 999999) {
value = Math.round(d/1000);
scale = 'k';
} else {
value = Math.round(d/1000000);
scale = 'm';
}
return numberFilter(value) + scale;
} else {
return numberFilter(d);
}
};
};
$scope.xFunction = function () {
return function (d) {
return d.timestamp;
};
};
$scope.yFunction = function () {
return function (d) {<|fim▁hole|> };
},
link: function postLink(scope, element, attrs) {
if (!_.has(attrs, 'showTimeRange')) {
scope.showTimeRange = true;
}
scope.timeAxisFormat = scope.timeAxisFormat || 'HH:mm';
scope.$watch('data', function (data) {
if (data && data[0] && data[0].values && (data[0].values.length > 1)) {
var timeseries = _.sortBy(data[0].values, function (item) {
return item.timestamp;
});
var start = timeseries[0].timestamp;
var end = timeseries[timeseries.length - 1].timestamp;
scope.start = start;
scope.end = end;
}
});
}
};
});<|fim▁end|>
|
return d.value;
};
|
<|file_name|>LongSerializationPolicy.java<|end_file_name|><|fim▁begin|>package com.google.gson;
public enum LongSerializationPolicy {
DEFAULT {
public final JsonElement serialize(Long l) {
return new JsonPrimitive((Number) l);
}
},
STRING {
public final JsonElement serialize(Long l) {
return new JsonPrimitive(String.valueOf(l));
}
};
public abstract JsonElement serialize(Long l);<|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>h5l.rs<|end_file_name|><|fim▁begin|>pub use self::H5L_type_t::*;
use libc::{c_int, c_uint, c_void, c_char, size_t, ssize_t, int64_t, uint32_t};
use std::mem::transmute;
use h5::{htri_t, haddr_t, herr_t, hbool_t, hsize_t, H5_index_t, H5_iter_order_t};
use h5i::hid_t;
use h5t::{H5T_cset_t};
pub const H5L_MAX_LINK_NAME_LEN: uint32_t = !0;
pub const H5L_SAME_LOC: hid_t = 0;
pub const H5L_LINK_CLASS_T_VERS: c_uint = 0;
#[repr(C)]
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub enum H5L_type_t {
H5L_TYPE_ERROR = -1,
H5L_TYPE_HARD = 0,
H5L_TYPE_SOFT = 1,
H5L_TYPE_EXTERNAL = 64,
H5L_TYPE_MAX = 255,
}
pub const H5L_TYPE_BUILTIN_MAX: H5L_type_t = H5L_TYPE_SOFT;
pub const H5L_TYPE_UD_MIN: H5L_type_t = H5L_TYPE_EXTERNAL;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct H5L_info_t {
pub _type: H5L_type_t,
pub corder_valid: hbool_t,
pub corder: int64_t,
pub cset: H5T_cset_t,
pub u: __H5L_info_t__u,
}
impl ::std::default::Default for H5L_info_t {
fn default() -> H5L_info_t { unsafe { ::std::mem::zeroed() } }
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct __H5L_info_t__u {
pub _bindgen_data_: [u64; 1usize],
}
impl ::std::default::Default for __H5L_info_t__u {
fn default() -> __H5L_info_t__u { unsafe { ::std::mem::zeroed() } }
}
impl __H5L_info_t__u {
pub unsafe fn address(&mut self) -> *mut haddr_t {
transmute(&self._bindgen_data_)
}
pub unsafe fn val_size(&mut self) -> *mut size_t {
transmute(&self._bindgen_data_)
}
}
pub type H5L_create_func_t = Option<extern fn (link_name: *const c_char, loc_group: hid_t, lnkdata:
*const c_void, lnkdata_size: size_t, lcpl_id: hid_t)
-> herr_t>;
pub type H5L_move_func_t = Option<extern fn (new_name: *const c_char, new_loc: hid_t, lnkdata:
*const c_void, lnkdata_size: size_t) -> herr_t>;
pub type H5L_copy_func_t = Option<extern fn (new_name: *const c_char, new_loc: hid_t, lnkdata:
*const c_void, lnkdata_size: size_t) -> herr_t>;
pub type H5L_traverse_func_t = Option<extern fn (link_name: *const c_char, cur_group: hid_t,
lnkdata: *const c_void, lnkdata_size: size_t,
lapl_id: hid_t) -> hid_t>;
pub type H5L_delete_func_t = Option<extern fn (link_name: *const c_char, file: hid_t, lnkdata:
*const c_void, lnkdata_size: size_t) -> herr_t>;
pub type H5L_query_func_t = Option<extern fn (link_name: *const c_char, lnkdata: *const c_void,
lnkdata_size: size_t, buf: *mut c_void, buf_size:
size_t) -> ssize_t>;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct H5L_class_t {
pub version: c_int,
pub id: H5L_type_t,
pub comment: *const c_char,
pub create_func: H5L_create_func_t,
pub move_func: H5L_move_func_t,<|fim▁hole|> pub trav_func: H5L_traverse_func_t,
pub del_func: H5L_delete_func_t,
pub query_func: H5L_query_func_t,
}
impl ::std::default::Default for H5L_class_t {
fn default() -> H5L_class_t { unsafe { ::std::mem::zeroed() } }
}
pub type H5L_iterate_t = Option<extern fn (group: hid_t, name: *const c_char, info: *const
H5L_info_t, op_data: *mut c_void) -> herr_t>;
pub type H5L_elink_traverse_t = Option<extern fn (parent_file_name: *const c_char,
parent_group_name: *const c_char, child_file_name:
*const c_char, child_object_name: *const c_char,
acc_flags: *mut c_uint, fapl_id: hid_t, op_data:
*mut c_void) -> herr_t>;
extern {
pub fn H5Lmove(src_loc: hid_t, src_name: *const c_char, dst_loc: hid_t, dst_name: *const c_char,
lcpl_id: hid_t, lapl_id: hid_t) -> herr_t;
pub fn H5Lcopy(src_loc: hid_t, src_name: *const c_char, dst_loc: hid_t, dst_name: *const c_char,
lcpl_id: hid_t, lapl_id: hid_t) -> herr_t;
pub fn H5Lcreate_hard(cur_loc: hid_t, cur_name: *const c_char, dst_loc: hid_t, dst_name: *const
c_char, lcpl_id: hid_t, lapl_id: hid_t) -> herr_t;
pub fn H5Lcreate_soft(link_target: *const c_char, link_loc_id: hid_t, link_name: *const c_char,
lcpl_id: hid_t, lapl_id: hid_t) -> herr_t;
pub fn H5Ldelete(loc_id: hid_t, name: *const c_char, lapl_id: hid_t) -> herr_t;
pub fn H5Ldelete_by_idx(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, n: hsize_t, lapl_id: hid_t) -> herr_t;
pub fn H5Lget_val(loc_id: hid_t, name: *const c_char, buf: *mut c_void, size: size_t, lapl_id:
hid_t) -> herr_t;
pub fn H5Lget_val_by_idx(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, n: hsize_t, buf: *mut c_void, size: size_t, lapl_id:
hid_t) -> herr_t;
pub fn H5Lexists(loc_id: hid_t, name: *const c_char, lapl_id: hid_t) -> htri_t;
pub fn H5Lget_info(loc_id: hid_t, name: *const c_char, linfo: *mut H5L_info_t, lapl_id: hid_t)
-> herr_t;
pub fn H5Lget_info_by_idx(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, n: hsize_t, linfo: *mut H5L_info_t, lapl_id: hid_t)
-> herr_t;
pub fn H5Lget_name_by_idx(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, n: hsize_t, name: *mut c_char, size: size_t, lapl_id:
hid_t) -> ssize_t;
pub fn H5Literate(grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, idx: *mut
hsize_t, op: H5L_iterate_t, op_data: *mut c_void) -> herr_t;
pub fn H5Literate_by_name(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, idx: *mut hsize_t, op: H5L_iterate_t, op_data: *mut
c_void, lapl_id: hid_t) -> herr_t;
pub fn H5Lvisit(grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5L_iterate_t,
op_data: *mut c_void) -> herr_t;
pub fn H5Lvisit_by_name(loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order:
H5_iter_order_t, op: H5L_iterate_t, op_data: *mut c_void, lapl_id:
hid_t) -> herr_t;
pub fn H5Lcreate_ud(link_loc_id: hid_t, link_name: *const c_char, link_type: H5L_type_t, udata:
*const c_void, udata_size: size_t, lcpl_id: hid_t, lapl_id: hid_t) ->
herr_t;
pub fn H5Lregister(cls: *const H5L_class_t) -> herr_t;
pub fn H5Lunregister(id: H5L_type_t) -> herr_t;
pub fn H5Lis_registered(id: H5L_type_t) -> htri_t;
pub fn H5Lunpack_elink_val(ext_linkval: *const c_void, link_size: size_t, flags: *mut c_uint,
filename: *mut *const c_char, obj_path: *mut *const c_char) ->
herr_t;
pub fn H5Lcreate_external(file_name: *const c_char, obj_name: *const c_char, link_loc_id: hid_t,
link_name: *const c_char, lcpl_id: hid_t, lapl_id: hid_t) -> herr_t;
}<|fim▁end|>
|
pub copy_func: H5L_copy_func_t,
|
<|file_name|>ckeditor.files.js<|end_file_name|><|fim▁begin|>/**
* Nooku Framework - http://www.nooku.org
*
* @copyright Copyright (C) 2011 - 2017 Johan Janssens and Timble CVBA. (http://www.timble.net)
* @license GNU AGPLv3 <https://www.gnu.org/licenses/agpl.html>
* @link https://github.com/timble/openpolice-platform
*/
if(!Ckeditor) var Ckeditor = {};
Ckeditor.Files = new Class({
Extends: Files.App,
Implements: [Events, Options],
options: {
types: ['file', 'image'],
editor: null,
preview: 'files-preview',
grid: {
cookie: false,
layout: 'compact',
batch_delete: false
},
history: {
enabled: false
}
},
initialize: function(options) {
this.parent(options);
this.editor = this.options.editor;
this.preview = document.id(this.options.preview);
},
setPaginator: function() {
},
setPathway: function() {<|fim▁hole|> this.fireEvent('beforeSetState');
var opts = this.options.state;
this.state = new Files.State(opts);
this.fireEvent('afterSetState');
},
setGrid: function() {
var opts = this.options.grid;
var that = this;
$extend(opts, {
'onClickImage': function(e) {
that.setPreview(document.id(e.target), 'image');
},
'onClickFile': function(e) {
that.setPreview(document.id(e.target), 'file');
}
});
this.grid = new Files.Grid(this.options.grid.element, opts);
},
setPreview: function(target, type) {
var node = target.getParent('.files-node-shadow') || target.getParent('.files-node');
var row = node.retrieve('row');
var copy = $extend({}, row);
var path = row.baseurl+"/"+row.filepath;
var url = path.replace(Files.sitebase+'/', '').replace(/files\/[^\/]+\//, '');
// Update active row
node.getParent().getChildren().removeClass('active');
node.addClass('active');
// Load preview template
copy.template = 'details_'+type;
this.preview.empty();
copy.render('compact').inject(this.preview);
// Inject preview image
if (type == 'image') {
this.preview.getElement('img').set('src', copy.image);
}
// When no text is selected use the file name
if (type == 'file') {
if(document.id('image-text').get('value') == ""){
document.id('image-text').set('value', row.name);
}
}
document.id('image-url').set('value', url);
document.id('image-type').set('value',row.metadata.mimetype);
}
});<|fim▁end|>
|
},
setState: function() {
// TODO: Implement pagination into the view
|
<|file_name|>strings.go<|end_file_name|><|fim▁begin|>// Description: utils/strings.go
// Author: ZHU HAIHUA
// Since: 2016-04-08 19:45
package util
import (
"encoding/json"
"fmt"
. "reflect"
"strconv"
)
type StringStyle int
const (
StringStyleShort StringStyle = iota
StringStyleMedium
StringStyleLong
)
// ToJson return the json format of the obj
// when error occur it will return empty.
// Notice: unexported field will not be marshaled
func ToJson(obj interface{}) string {
result, err := json.Marshal(obj)
if err != nil {
return fmt.Sprintf("<no value with error: %v>", err)
}
return string(result)
}
// ToString return the common string format of the obj according
// to the given arguments
//
// by default obj.ToString() will be called if this method exists.
// otherwise we will call ReflectToString() to get it's string
// representation
//
// the args please refer to the ReflectToString() function.
func ToString(obj interface{}, args ...interface{}) string {
if v, ok := obj.(fmt.Stringer); ok {
return v.String()
}
return ReflectToString(obj, args)
}
const (
// the NONE used to set an empty boundary or separator.
// e.g: you want to set the output of slice with no boundary,
// you NEED to set StringConf as:
//
// &StringConf {
// BoundaryArrayAndSliceStart: NONE, // NOT ""
// BoundaryArrayAndSliceEnd: NONE, // NOT ""
// }
NONE string = "<none>"
)
type StringConf struct {
SepElem string `defaults:","`
SepField string `defaults:", "`
SepKeyValue string `defaults:"="`
BoundaryStructStart string `defaults:"{"`
BoundaryStructEnd string `defaults:"}"`
BoundaryMapStart string `defaults:"{"`
BoundaryMapEnd string `defaults:"}"`
BoundaryArraySliceStart string `defaults:"["`
BoundaryArraySliceEnd string `defaults:"]"`
BoundaryPointerFuncStart string `defaults:"("`
BoundaryPointerFuncEnd string `defaults:")"`
BoundaryInterfaceStart string `defaults:"("`
BoundaryInterfaceEnd string `defaults:")"`
}
// v return the value of StringConf by given key
func (conf *StringConf) v(name string) string {
cnf := ValueOf(conf).Elem()
if field, ok := cnf.Type().FieldByName(name); ok {
value := cnf.FieldByName(name).String()
if value == NONE {
return ""
} else if value == "" {
return field.Tag.Get("defaults")
} else {
return cnf.FieldByName(name).String()
}
} else {
panic(fmt.Errorf("<no such key: %s>", name))
}
}
// ReflectToString return the string formatted by the given arguments,
// the number of optional arguments can be one or two
//
// the first argument is the print style, and it's default value is
// StyleMedium. the second argument is the style configuration pointer.
//
// The long style may be a very long format like following:
//
// Type{name=value}
//
// it's some different from fmt.Printf("%#v\n", value),
// and separated by comma and equal by default.
//
// Then, the medium style would like:
//
// {key=value}
//
// it's some different from fmt.Printf("%+v\n", value),
// and separated by comma and equal by default.
//
// Otherwise the short format will only print the value but no type
// and name information.
//
// since recursive calling, this method would be pretty slow, so if you
// use it to print log, may be you need to check if the log level is
// enabled firstly
//
// examples:
//
// - ReflectToString(input)
// - ReflectToString(input, StringStyleLong)
// - ReflectToString(input, StringStyleMedium, &StringConf{SepElem:";", SepField:",", SepKeyValue:":"})
// - ReflectToString(input, StringStyleLong, &StringConf{SepField:","})
func ReflectToString(obj interface{}, args ...interface{}) string {
style := StringStyleMedium
cnf := &StringConf{}
switch len(args) {
case 1:
style = args[0].(StringStyle)
case 2:
style = args[0].(StringStyle)
cnf = args[1].(*StringConf)
}
return valueToString(ValueOf(obj), style, cnf)
}
// valueToString recursively print all the value
func valueToString(val Value, style StringStyle, cnf *StringConf) string {
var str string
if !val.IsValid() {
return "<zero Value>"
}
typ := val.Type()
switch val.Kind() {
case Int, Int8, Int16, Int32, Int64:
return strconv.FormatInt(val.Int(), 10)
case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
return strconv.FormatUint(val.Uint(), 10)
case Float32, Float64:
return strconv.FormatFloat(val.Float(), 'g', -1, 64)
case Complex64, Complex128:
c := val.Complex()
return strconv.FormatFloat(real(c), 'g', -1, 64) + "+" + strconv.FormatFloat(imag(c), 'g', -1, 64) + "i"
case String:
return val.String()
case Bool:
if val.Bool() {<|fim▁hole|> } else {
return "false"
}
case Ptr:
v := val
if style == StringStyleLong {
str += typ.String() + cnf.v("BoundaryPointerFuncStart")
}
if v.IsNil() {
str += "0"
} else {
str += "&" + valueToString(v.Elem(), style, cnf)
}
if style == StringStyleLong {
str += cnf.v("BoundaryPointerFuncEnd")
}
return str
case Array, Slice:
v := val
if style == StringStyleLong {
str += typ.String()
}
str += cnf.v("BoundaryArraySliceStart")
for i := 0; i < v.Len(); i++ {
if i > 0 {
str += cnf.v("SepElem")
}
str += valueToString(v.Index(i), style, cnf)
}
str += cnf.v("BoundaryArraySliceEnd")
return str
case Map:
t := typ
if style == StringStyleLong {
str += t.String()
}
str += cnf.v("BoundaryMapStart")
//str += "<can't iterate on maps>"
keys := val.MapKeys()
for i, _ := range keys {
if i > 0 {
str += cnf.v("SepElem")
}
str += valueToString(keys[i], style, cnf)
str += cnf.v("SepKeyValue")
str += valueToString(val.MapIndex(keys[i]), style, cnf)
}
str += cnf.v("BoundaryMapEnd")
return str
case Chan:
if style == StringStyleLong {
str += typ.String()
}
return str
case Struct:
t := typ
v := val
if style == StringStyleLong {
str += t.String()
}
str += cnf.v("BoundaryStructStart")
for i, n := 0, v.NumField(); i < n; i++ {
if i > 0 {
str += cnf.v("SepField")
}
if style == StringStyleLong || style == StringStyleMedium {
str += val.Type().Field(i).Name
str += cnf.v("SepKeyValue")
}
str += valueToString(v.Field(i), style, cnf)
}
str += cnf.v("BoundaryStructEnd")
return str
case Interface:
//t := ""
if style == StringStyleLong {
str += typ.String() + cnf.v("BoundaryInterfaceStart")
}
str += valueToString(val.Elem(), style, cnf)
if style == StringStyleLong {
str += cnf.v("BoundaryInterfaceEnd")
}
return str
case Func:
v := val
if style == StringStyleLong {
str += typ.String() + cnf.v("BoundaryPointerFuncStart")
}
str += strconv.FormatUint(uint64(v.Pointer()), 10)
if style == StringStyleLong {
str += cnf.v("BoundaryPointerFuncEnd")
}
return str
default:
panic("valueToString: can't print type " + typ.String())
}
}<|fim▁end|>
|
return "true"
|
<|file_name|>test_game.py<|end_file_name|><|fim▁begin|>import game as game
import pytest
import sys
sys.path.insert(0, '..')
def trim_board(ascii_board):
return '\n'.join([i.strip() for i in ascii_board.splitlines()])
t = trim_board
def test_new_board():
game.Board(3,3).ascii() == t("""
...
...
...
""")
game.Board(4,3).ascii() == t("""
....
....
....
""")
game.Board(3,4).ascii() == t("""
...
...
...
...
""")
def test_game():
board = game.Board(3,3,win=3)
assert board.count_tokens == 0
assert board.game_status == 'active'
assert board.turn_color == None
# drop first token
token = board.drop('x',0)
assert board.game_status == 'active'
assert token.position == (0,0)
assert token.color == 'x'
assert board.ascii() == t("""
...
...
x..
""")
assert board.count_tokens == 1
assert board.turn_color == 'o'
# drop second token
token = board.drop('o',0)
assert board.game_status == 'active'
assert token.position == (0,1)
assert token.color == 'o'
assert board.ascii() == t("""
...
o..
x..
""")
assert board.count_tokens == 2
assert board.turn_color == 'x'
# dropping the wrong color should raise an error
with pytest.raises(Exception):
token = board.drop('o',1)
# drop third token
token = board.drop('x',1)
assert board.game_status == 'active'
assert token.position == (1,0)
assert token.color == 'x'
board.ascii() == t("""<|fim▁hole|> o..
xx.
""")
assert board.count_tokens == 3
assert board.turn_color == 'o'
# drop fourth token
token = board.drop('o',0)
assert board.game_status == 'active'
assert token.position == (0,2)
assert token.color == 'o'
board.ascii() == t("""
o..
o..
xx.
""")
assert board.count_tokens == 4
# drop fifth token
token = board.drop('x',2)
assert board.game_status == 'over'
assert board.won_by == 'x'
assert token.position == (2,0)
assert token.color == 'x'
board.ascii() == t("""
o..
o..
xxx
""")
assert board.count_tokens == 5
def test_load_board():
    """
    The Board class should provide a load method to load a predefined board.
    the load method should be implemented as a static method like this:
    >>> class Test:
    >>>     @staticmethod
    >>>     def a_static_factory():
    >>>         t = Test()
    >>>         # do something with t and return it
    >>>         return t
    the load function accepts a board layout. It retrieves the dimensions of the board
    and loads the provided data into the board.
    """
    layout = t("""
    o..
    o..
    xxx
    """)
    board = game.Board.load(layout)
    # Fix: the original built the board and asserted nothing. Loading and
    # re-serializing should reproduce the trimmed input layout, mirroring the
    # ascii()-equality convention used in test_game().
    assert board.ascii() == layout
def test_axis_strings():
    """Axis strings around (0, 0) come back in the order: | \\ / - ."""
    loaded = game.Board.load(t("""
    o..
    o..
    xxx
    """))
    axes = loaded.axis_strings(0, 0)
    assert axes[0] == 'xoo'  # | vertical
    assert axes[1] == 'x'    # \ down-diagonal
    assert axes[2] == 'x..'  # / up-diagonal
    assert axes[3] == 'xxx'  # - horizontal: the winner :-)
    assert loaded.won_by == 'x'
|
...
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Emulates virtual and hardware devices.
mod bus;
mod cmos;
#[cfg(feature = "direct")]
pub mod direct_io;
#[cfg(feature = "direct")]
pub mod direct_irq;
mod i8042;
pub mod irqchip;
mod pci;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod pit;
pub mod pl030;
mod platform;
mod proxy;
#[cfg(feature = "usb")]
#[macro_use]
mod register_space;
pub mod acpi;
pub mod bat;
mod serial;
pub mod serial_device;
#[cfg(feature = "usb")]
pub mod usb;
#[cfg(feature = "usb")]
mod utils;
pub mod vfio;
pub mod virtio;
pub use self::acpi::ACPIPMResource;
pub use self::bat::{BatteryError, GoldfishBattery};<|fim▁hole|> Bus, BusAccessInfo, BusDevice, BusDeviceObj, BusDeviceSync, BusRange, BusResumeDevice, BusType,
HostHotPlugKey, HotPlugBus,
};
pub use self::cmos::Cmos;
#[cfg(feature = "direct")]
pub use self::direct_io::{DirectIo, DirectMmio};
#[cfg(feature = "direct")]
pub use self::direct_irq::{DirectIrq, DirectIrqError};
pub use self::i8042::I8042Device;
pub use self::irqchip::*;
#[cfg(feature = "audio")]
pub use self::pci::{Ac97Backend, Ac97Dev, Ac97Parameters};
pub use self::pci::{
BarRange, CoIommuDev, CoIommuParameters, CoIommuUnpinPolicy, PciAddress, PciBridge,
PciClassCode, PciConfigIo, PciConfigMmio, PciDevice, PciDeviceError, PciInterruptPin, PciRoot,
PciVirtualConfigMmio, PcieHostRootPort, PcieRootPort, PvPanicCode, PvPanicPciDevice,
StubPciDevice, StubPciParameters, VfioPciDevice,
};
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub use self::pit::{Pit, PitError};
pub use self::pl030::Pl030;
pub use self::platform::VfioPlatformDevice;
pub use self::proxy::Error as ProxyError;
pub use self::proxy::ProxyDevice;
pub use self::serial::Serial;
pub use self::serial_device::{
Error as SerialError, SerialDevice, SerialHardware, SerialParameters, SerialType,
};
#[cfg(feature = "usb")]
pub use self::usb::host_backend::host_backend_device_provider::HostBackendDeviceProvider;
#[cfg(feature = "usb")]
pub use self::usb::xhci::xhci_controller::XhciController;
pub use self::vfio::{VfioContainer, VfioDevice};
pub use self::virtio::{vfio_wrapper, VirtioPciDevice};
/// Request CoIOMMU to unpin a specific range.
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
pub struct UnpinRequest {
/// The ranges presents (start gfn, count).
ranges: Vec<(u64, u64)>,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum UnpinResponse {
Success,
Failed,
}
#[derive(Debug)]
pub enum ParseIommuDevTypeResult {
NoSuchType,
}
/// IOMMU handling mode for a passthrough device, selected on the command
/// line as "off", "viommu" or "coiommu" (see the `FromStr` impl below).
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum IommuDevType {
    /// No IOMMU in front of the device.
    NoIommu,
    /// The device is placed behind a virtio-iommu.
    VirtioIommu,
    /// The device uses the CoIommu device (see `CoIommuDev`).
    CoIommu,
}
use std::str::FromStr;
impl FromStr for IommuDevType {
type Err = ParseIommuDevTypeResult;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"off" => Ok(IommuDevType::NoIommu),
"viommu" => Ok(IommuDevType::VirtioIommu),
"coiommu" => Ok(IommuDevType::CoIommu),
_ => Err(ParseIommuDevTypeResult::NoSuchType),
}
}
}<|fim▁end|>
|
pub use self::bus::Error as BusError;
pub use self::bus::{
|
<|file_name|>add_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os, sys, re
args = sys.argv
if not len(args) in (3, 4):
print "Usage: add_test.py <rawdata file> <report file> [tests stash file]"
print "If the last is not specified, `tests_stash.txt' is assumed"
sys.exit(1)
rawdata = []
for line in open(args[1], "rt").readlines():
lookfor = ["basic_cpuid", "ext_cpuid", "intel_fn4", "intel_fn11"]
good = False
for match in lookfor:
if line.find(match) != -1:
good = True
break
if good:
rawdata.append(line.strip())<|fim▁hole|>for line in open(args[2], "rt").readlines():
s = line.strip()
if s.find(":") == -1:
continue
numeric = ["family", "model", "stepping", "ext_family", "ext_model",
"num_cores", "num_logical", "L1 D cache", "L1 I cache",
"L2 cache", "L3 cache", "L1D assoc.", "L2 assoc.",
"L3 assoc.", "L1D line sz", "L2 line sz", "L3 line sz"]
field = s[:s.find(":")].strip()
if field in numeric:
value = s[s.find(":")+1:].strip()
if not rexp.match(value):
raise "Bad format of value: [%s]" % s
repdata.append(rexp.findall(value)[0])
if field == "code name":
value = s[s.find("`") + 1: s.find("'")]
repdata.append(value)
if field == "features":
value = s[s.find(":") + 2:]
repdata.append(value)
if field == "SSE units":
value = s[s.find(":") + 2:]
# the value here is something like "XX bits (authoritative)". We remove the "bits" part:
i = value.find("bits")
if i != -1:
value = value[:i] + value[i + 5:]
repdata.append(value)
stash = "tests_stash.txt"
if len(args) == 4:
stash = args[3]
fout = open(stash, "at")
delimiter = "-" * 80
lines = rawdata + [delimiter] + repdata + [delimiter]
fout.writelines(map(lambda s: s + "\n", lines))
fout.close()<|fim▁end|>
|
repdata = []
rexp = re.compile('(-?[0-9]+).*')
|
<|file_name|>TcpIpJoiner.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cluster.impl;
import com.hazelcast.config.Config;
import com.hazelcast.config.InterfacesConfig;
import com.hazelcast.config.NetworkConfig;
import com.hazelcast.config.TcpIpConfig;
import com.hazelcast.instance.Node;
import com.hazelcast.internal.cluster.impl.AbstractJoiner;
import com.hazelcast.internal.cluster.impl.ClusterServiceImpl;
import com.hazelcast.internal.cluster.impl.SplitBrainJoinMessage;
import com.hazelcast.internal.cluster.impl.operations.JoinMastershipClaimOp;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Connection;
import com.hazelcast.spi.properties.GroupProperty;
import com.hazelcast.util.AddressUtil;
import com.hazelcast.util.AddressUtil.AddressMatcher;
import com.hazelcast.util.AddressUtil.InvalidAddressException;
import com.hazelcast.util.Clock;
import com.hazelcast.util.EmptyStatement;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.util.AddressUtil.AddressHolder;
public class TcpIpJoiner extends AbstractJoiner {
private static final long JOIN_RETRY_WAIT_TIME = 1000L;
private static final int LOOK_FOR_MASTER_MAX_TRY_COUNT = 20;
private final int maxPortTryCount;
private volatile boolean claimingMaster;
public TcpIpJoiner(Node node) {
super(node);
int tryCount = node.getProperties().getInteger(GroupProperty.TCP_JOIN_PORT_TRY_COUNT);
if (tryCount <= 0) {
throw new IllegalArgumentException(String.format("%s should be greater than zero! Current value: %d",
GroupProperty.TCP_JOIN_PORT_TRY_COUNT, tryCount));
}
maxPortTryCount = tryCount;
}
public boolean isClaimingMaster() {
return claimingMaster;
}
protected int getConnTimeoutSeconds() {
return config.getNetworkConfig().getJoin().getTcpIpConfig().getConnectionTimeoutSeconds();
}
    /**
     * Entry point of the TCP/IP join process. Strategy, in priority order:
     * (1) an explicitly supplied target address, (2) the tcp-ip config's
     * required member, (3) scanning the full list of possible members.
     */
    @Override
    public void doJoin() {
        final Address targetAddress = getTargetAddress();
        if (targetAddress != null) {
            long maxJoinMergeTargetMillis = node.getProperties().getMillis(GroupProperty.MAX_JOIN_MERGE_TARGET_SECONDS);
            joinViaTargetMember(targetAddress, maxJoinMergeTargetMillis);
            // Fall back to the possible-member scan if the target join failed.
            if (!clusterService.isJoined()) {
                joinViaPossibleMembers();
            }
        } else if (config.getNetworkConfig().getJoin().getTcpIpConfig().getRequiredMember() != null) {
            Address requiredMember = getRequiredMemberAddress();
            long maxJoinMillis = getMaxJoinMillis();
            joinViaTargetMember(requiredMember, maxJoinMillis);
        } else {
            joinViaPossibleMembers();
        }
    }
private void joinViaTargetMember(Address targetAddress, long maxJoinMillis) {
try {
if (targetAddress == null) {
throw new IllegalArgumentException("Invalid target address -> NULL");
}
if (logger.isFineEnabled()) {
logger.fine("Joining over target member " + targetAddress);
}
if (targetAddress.equals(node.getThisAddress()) || isLocalAddress(targetAddress)) {
clusterJoinManager.setThisMemberAsMaster();
return;
}
long joinStartTime = Clock.currentTimeMillis();
Connection connection;
while (shouldRetry() && (Clock.currentTimeMillis() - joinStartTime < maxJoinMillis)) {
connection = node.connectionManager.getOrConnect(targetAddress);
if (connection == null) {
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
continue;
}
if (logger.isFineEnabled()) {
logger.fine("Sending joinRequest " + targetAddress);
}
clusterJoinManager.sendJoinRequest(targetAddress, true);
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
} catch (final Exception e) {
logger.warning(e);
}
}
private void joinViaPossibleMembers() {
try {
blacklistedAddresses.clear();
Collection<Address> possibleAddresses = getPossibleAddresses();
boolean foundConnection = tryInitialConnection(possibleAddresses);
if (!foundConnection) {
logger.fine("This node will assume master role since no possible member where connected to.");
clusterJoinManager.setThisMemberAsMaster();
return;
}
long maxJoinMillis = getMaxJoinMillis();
long startTime = Clock.currentTimeMillis();
while (shouldRetry() && (Clock.currentTimeMillis() - startTime < maxJoinMillis)) {
tryToJoinPossibleAddresses(possibleAddresses);
if (clusterService.isJoined()) {
return;
}
if (isAllBlacklisted(possibleAddresses)) {
logger.fine(
"This node will assume master role since none of the possible members accepted join request.");
clusterJoinManager.setThisMemberAsMaster();
return;
}
boolean masterCandidate = isThisNodeMasterCandidate(possibleAddresses);
if (masterCandidate) {
boolean consensus = claimMastership(possibleAddresses);
if (consensus) {
if (logger.isFineEnabled()) {
Set<Address> votingEndpoints = new HashSet<Address>(possibleAddresses);
votingEndpoints.removeAll(blacklistedAddresses.keySet());
logger.fine("Setting myself as master after consensus!"
+ " Voting endpoints: " + votingEndpoints);
}
clusterJoinManager.setThisMemberAsMaster();
claimingMaster = false;
return;
}
} else {
if (logger.isFineEnabled()) {
logger.fine("Cannot claim myself as master! Will try to connect a possible master...");
}
}
claimingMaster = false;
lookForMaster(possibleAddresses);
}
} catch (Throwable t) {
logger.severe(t);
}
}
    /**
     * Asks every connected, non-blacklisted possible member for permission to
     * become master by invoking {@link JoinMastershipClaimOp} on each.
     * Consensus requires every queried member to answer {@code true}; any
     * negative answer, invocation failure, or exceeding the overall ten-second
     * waiting budget aborts the claim.
     *
     * @return {@code true} when all queried endpoints agreed
     */
    @SuppressWarnings("checkstyle:npathcomplexity")
    private boolean claimMastership(Collection<Address> possibleAddresses) {
        if (logger.isFineEnabled()) {
            Set<Address> votingEndpoints = new HashSet<Address>(possibleAddresses);
            votingEndpoints.removeAll(blacklistedAddresses.keySet());
            logger.fine("Claiming myself as master node! Asking to endpoints: " + votingEndpoints);
        }
        claimingMaster = true;
        Collection<Future<Boolean>> responses = new LinkedList<Future<Boolean>>();
        for (Address address : possibleAddresses) {
            if (isBlacklisted(address)) {
                continue;
            }
            // Only endpoints with an established connection are asked to vote.
            if (node.getConnectionManager().getConnection(address) != null) {
                Future<Boolean> future = node.nodeEngine.getOperationService()
                        .createInvocationBuilder(ClusterServiceImpl.SERVICE_NAME,
                                new JoinMastershipClaimOp(), address).setTryCount(1).invoke();
                responses.add(future);
            }
        }
        final long maxWait = TimeUnit.SECONDS.toMillis(10);
        long waitTime = 0L;
        boolean consensus = true;
        for (Future<Boolean> response : responses) {
            long t = Clock.currentTimeMillis();
            try {
                // Each endpoint gets up to one second; failures count as "no".
                consensus = response.get(1, TimeUnit.SECONDS);
            } catch (Exception e) {
                logger.finest(e);
                consensus = false;
            } finally {
                waitTime += (Clock.currentTimeMillis() - t);
            }
            if (!consensus) {
                break;
            }
            // Give up once the cumulative wait exceeds the overall budget.
            if (waitTime > maxWait) {
                consensus = false;
                break;
            }
        }
        return consensus;
    }
    /**
     * Decides whether this node may try to claim mastership: it is a
     * candidate only if no currently-connected, non-blacklisted possible
     * member has a smaller address hash code (lowest hash wins as a cheap
     * deterministic tie-breaker).
     */
    private boolean isThisNodeMasterCandidate(Collection<Address> possibleAddresses) {
        int thisHashCode = node.getThisAddress().hashCode();
        for (Address address : possibleAddresses) {
            if (isBlacklisted(address)) {
                continue;
            }
            if (node.connectionManager.getConnection(address) != null) {
                // NOTE(review): equal hash codes leave both nodes as candidates;
                // presumably the subsequent mastership-claim consensus resolves
                // such ties -- confirm.
                if (thisHashCode > address.hashCode()) {
                    return false;
                }
            }
        }
        return true;
    }
private void tryToJoinPossibleAddresses(Collection<Address> possibleAddresses) throws InterruptedException {
long connectionTimeoutMillis = TimeUnit.SECONDS.toMillis(getConnTimeoutSeconds());
long start = Clock.currentTimeMillis();
while (!clusterService.isJoined() && Clock.currentTimeMillis() - start < connectionTimeoutMillis) {
Address masterAddress = clusterService.getMasterAddress();
if (isAllBlacklisted(possibleAddresses) && masterAddress == null) {
return;
}
if (masterAddress != null) {
if (logger.isFineEnabled()) {
logger.fine("Sending join request to " + masterAddress);
}
clusterJoinManager.sendJoinRequest(masterAddress, true);
} else {
sendMasterQuestion(possibleAddresses);
}
if (!clusterService.isJoined()) {
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
}
}
private boolean tryInitialConnection(Collection<Address> possibleAddresses) throws InterruptedException {
long connectionTimeoutMillis = TimeUnit.SECONDS.toMillis(getConnTimeoutSeconds());
long start = Clock.currentTimeMillis();
while (Clock.currentTimeMillis() - start < connectionTimeoutMillis) {
if (isAllBlacklisted(possibleAddresses)) {
return false;
}
if (logger.isFineEnabled()) {
logger.fine("Will send master question to each address in: " + possibleAddresses);
}
if (sendMasterQuestion(possibleAddresses)) {
return true;
}
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
return false;
}
    /** Returns {@code true} when every possible address has been blacklisted. */
    private boolean isAllBlacklisted(Collection<Address> possibleAddresses) {
        return blacklistedAddresses.keySet().containsAll(possibleAddresses);
    }
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})<|fim▁hole|> while (clusterService.getMasterAddress() == null && tryCount++ < LOOK_FOR_MASTER_MAX_TRY_COUNT) {
sendMasterQuestion(possibleAddresses);
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
if (isAllBlacklisted(possibleAddresses)) {
break;
}
}
if (clusterService.isJoined()) {
return;
}
if (isAllBlacklisted(possibleAddresses) && clusterService.getMasterAddress() == null) {
if (logger.isFineEnabled()) {
logger.fine("Setting myself as master! No possible addresses remaining to connect...");
}
clusterJoinManager.setThisMemberAsMaster();
return;
}
long maxMasterJoinTime = getMaxJoinTimeToMasterNode();
long start = Clock.currentTimeMillis();
while (shouldRetry() && Clock.currentTimeMillis() - start < maxMasterJoinTime) {
Address master = clusterService.getMasterAddress();
if (master != null) {
if (logger.isFineEnabled()) {
logger.fine("Joining to master " + master);
}
clusterJoinManager.sendJoinRequest(master, true);
} else {
break;
}
//noinspection BusyWait
Thread.sleep(JOIN_RETRY_WAIT_TIME);
}
if (!clusterService.isJoined()) {
Address master = clusterService.getMasterAddress();
if (master != null) {
logger.warning("Couldn't join to the master: " + master);
} else {
if (logger.isFineEnabled()) {
logger.fine("Couldn't find a master! But there was connections available: " + possibleAddresses);
}
}
}
}
private boolean sendMasterQuestion(Collection<Address> possibleAddresses) {
if (logger.isFineEnabled()) {
logger.fine("NOT sending master question to blacklisted endpoints: " + blacklistedAddresses);
}
boolean sent = false;
for (Address address : possibleAddresses) {
if (isBlacklisted(address)) {
continue;
}
if (logger.isFineEnabled()) {
logger.fine("Sending master question to " + address);
}
if (clusterJoinManager.sendMasterQuestion(address)) {
sent = true;
}
}
return sent;
}
private Address getRequiredMemberAddress() {
TcpIpConfig tcpIpConfig = config.getNetworkConfig().getJoin().getTcpIpConfig();
String host = tcpIpConfig.getRequiredMember();
try {
AddressHolder addressHolder = AddressUtil.getAddressHolder(host, config.getNetworkConfig().getPort());
if (AddressUtil.isIpAddress(addressHolder.getAddress())) {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
InterfacesConfig interfaces = config.getNetworkConfig().getInterfaces();
if (interfaces.isEnabled()) {
InetAddress[] inetAddresses = InetAddress.getAllByName(addressHolder.getAddress());
if (inetAddresses.length > 1) {
for (InetAddress inetAddress : inetAddresses) {
if (AddressUtil.matchAnyInterface(inetAddress.getHostAddress(), interfaces.getInterfaces())) {
return new Address(inetAddress, addressHolder.getPort());
}
}
} else if (AddressUtil.matchAnyInterface(inetAddresses[0].getHostAddress(), interfaces.getInterfaces())) {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
} else {
return new Address(addressHolder.getAddress(), addressHolder.getPort());
}
} catch (final Exception e) {
logger.warning(e);
}
return null;
}
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity"})
protected Collection<Address> getPossibleAddresses() {
final Collection<String> possibleMembers = getMembers();
final Set<Address> possibleAddresses = new HashSet<Address>();
final NetworkConfig networkConfig = config.getNetworkConfig();
for (String possibleMember : possibleMembers) {
AddressHolder addressHolder = AddressUtil.getAddressHolder(possibleMember);
try {
boolean portIsDefined = addressHolder.getPort() != -1 || !networkConfig.isPortAutoIncrement();
int count = portIsDefined ? 1 : maxPortTryCount;
int port = addressHolder.getPort() != -1 ? addressHolder.getPort() : networkConfig.getPort();
AddressMatcher addressMatcher = null;
try {
addressMatcher = AddressUtil.getAddressMatcher(addressHolder.getAddress());
} catch (InvalidAddressException ignore) {
EmptyStatement.ignore(ignore);
}
if (addressMatcher != null) {
final Collection<String> matchedAddresses;
if (addressMatcher.isIPv4()) {
matchedAddresses = AddressUtil.getMatchingIpv4Addresses(addressMatcher);
} else {
// for IPv6 we are not doing wildcard matching
matchedAddresses = Collections.singleton(addressHolder.getAddress());
}
for (String matchedAddress : matchedAddresses) {
addPossibleAddresses(possibleAddresses, null, InetAddress.getByName(matchedAddress), port, count);
}
} else {
final String host = addressHolder.getAddress();
final InterfacesConfig interfaces = networkConfig.getInterfaces();
if (interfaces.isEnabled()) {
final InetAddress[] inetAddresses = InetAddress.getAllByName(host);
for (InetAddress inetAddress : inetAddresses) {
if (AddressUtil.matchAnyInterface(inetAddress.getHostAddress(),
interfaces.getInterfaces())) {
addPossibleAddresses(possibleAddresses, host, inetAddress, port, count);
}
}
} else {
addPossibleAddresses(possibleAddresses, host, null, port, count);
}
}
} catch (UnknownHostException e) {
logger.warning("Cannot resolve hostname '" + addressHolder.getAddress()
+ "'. Please make sure host is valid and reachable.");
if (logger.isFineEnabled()) {
logger.fine("Error during resolving possible target!", e);
}
}
}
possibleAddresses.remove(node.getThisAddress());
return possibleAddresses;
}
private void addPossibleAddresses(final Set<Address> possibleAddresses,
final String host, final InetAddress inetAddress,
final int port, final int count) throws UnknownHostException {
for (int i = 0; i < count; i++) {
int currentPort = port + i;
Address address;
if (host != null && inetAddress != null) {
address = new Address(host, inetAddress, currentPort);
} else if (host != null) {
address = new Address(host, currentPort);
} else {
address = new Address(inetAddress, currentPort);
}
if (!isLocalAddress(address)) {
possibleAddresses.add(address);
}
}
}
private boolean isLocalAddress(final Address address) throws UnknownHostException {
final Address thisAddress = node.getThisAddress();
final boolean local = thisAddress.getInetSocketAddress().equals(address.getInetSocketAddress());
if (logger.isFineEnabled()) {
logger.fine(address + " is local? " + local);
}
return local;
}
protected Collection<String> getMembers() {
return getConfigurationMembers(config);
}
    /**
     * Extracts the candidate member address strings from the tcp-ip join
     * configuration. A single configured entry may contain several addresses
     * separated by comma, semicolon or space; duplicates are collapsed by
     * collecting into a set.
     */
    public static Collection<String> getConfigurationMembers(Config config) {
        final TcpIpConfig tcpIpConfig = config.getNetworkConfig().getJoin().getTcpIpConfig();
        final Collection<String> configMembers = tcpIpConfig.getMembers();
        final Set<String> possibleMembers = new HashSet<String>();
        for (String member : configMembers) {
            // split members defined in tcp-ip configuration by comma(,) semi-colon(;) space( ).
            String[] members = member.split("[,; ]");
            Collections.addAll(possibleMembers, members);
        }
        return possibleMembers;
    }
@Override
public void searchForOtherClusters() {
final Collection<Address> possibleAddresses;
try {
possibleAddresses = getPossibleAddresses();
} catch (Throwable e) {
logger.severe(e);
return;
}
possibleAddresses.remove(node.getThisAddress());
possibleAddresses.removeAll(node.getClusterService().getMemberAddresses());
if (possibleAddresses.isEmpty()) {
return;
}
for (Address address : possibleAddresses) {
SplitBrainJoinMessage response = sendSplitBrainJoinMessage(address);
if (shouldMerge(response)) {
logger.warning(node.getThisAddress() + " is merging [tcp/ip] to " + address);
setTargetAddress(address);
startClusterMerge(address);
return;
}
}
}
    /** Identifier of this join mechanism as reported in logs/diagnostics. */
    @Override
    public String getType() {
        return "tcp-ip";
    }
}<|fim▁end|>
|
private void lookForMaster(Collection<Address> possibleAddresses) throws InterruptedException {
int tryCount = 0;
|
<|file_name|>simulation.py<|end_file_name|><|fim▁begin|>import heapq
class AbstractSimulation(object):
    """Discrete-event simulation driven by a min-heap of pending ant moves."""
    def __init__(self, reality, antmoves, stats):
        # Heapify in place so the earliest-finishing move is always on top.
        self.reality = reality
        self.antmoves = antmoves
        heapq.heapify(antmoves)
        self.stats = stats
        self.ticks = 0  # number of events processed so far
    def tick(self):
        """Process the next ant arrival and schedule that ant's follow-up move.

        Returns (changed_items, stats), where changed_items is the
        intersection (`&`) of the item sets touched by finishing the popped
        move and by starting the newly scheduled one.
        """
        ant_move = heapq.heappop(self.antmoves)
        self.reality.world.elapsed_time = ant_move.end_time # simulation is now at the point of ant_move.ant arriving at ant_move.destination
        new_antmove, changed_items_end = ant_move.process_end(self.reality, self.stats)
        # Simulation time must never run past the event we just processed.
        assert not self.reality.world.elapsed_time > ant_move.end_time
        changed_items_start = new_antmove.process_start()
        assert changed_items_start is not None, new_antmove
        heapq.heappush(self.antmoves, new_antmove)
        self.ticks += 1
        return changed_items_start & changed_items_end, self.stats
class TickStepSimulation(AbstractSimulation):
def advance(self):
if self.reality.is_resolved():
return [], True, None
tick_changed_items, stats = self.tick()<|fim▁hole|>
class MultiSpawnStepSimulation(AbstractSimulation):
    """Stepper that keeps ticking until several food deliveries have landed.

    One advance() call runs ticks until roughly `spawn_amount` units of food
    have arrived at the anthills, or the simulation resolves.
    """
    def __init__(self, reality, *args, **kwargs):
        super(MultiSpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        self.spawn_amount = 50  # deliveries per advance(); subclasses override
        self.anthills = reality.world.get_anthills()
    def _anthill_food_sum(self):
        # Total food currently stored across all anthills.
        return sum(anthill.food for anthill in self.anthills)
    def advance(self):
        """Run ticks until spawn_amount deliveries are observed.

        Returns (changed_items, done, last_route).
        """
        if self.reality.is_resolved():
            return [], True, None
        anthill_food_pre_tick = self._anthill_food_sum()
        changed_items = set()
        amount = 0
        while amount <= self.spawn_amount:
            tick_changed_items, stats = self.tick()
            changed_items.update(tick_changed_items)
            anthill_food_post_tick = self._anthill_food_sum()
            # NOTE(review): `amount` appears to count observed deliveries; it
            # only increases when the stored food total no longer matches the
            # pre-tick sum plus deliveries counted so far -- confirm intent.
            if anthill_food_post_tick != anthill_food_pre_tick+amount:
                if self.reality.is_resolved():
                    break
                amount += 1
        return changed_items, False, stats.last_route
class SpawnStepSimulation(MultiSpawnStepSimulation):
    """Stepper that advances one food delivery (spawn) at a time."""
    def __init__(self, reality, *args, **kwargs):
        super(SpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        self.spawn_amount = 1  # override: single delivery per advance()
class LastSpawnStepSimulation(MultiSpawnStepSimulation):
    """Stepper that runs through every remaining food delivery in one step."""
    def __init__(self, reality, *args, **kwargs):
        super(LastSpawnStepSimulation, self).__init__(reality, *args, **kwargs)
        # spawn_amount covers all food in the world, so advance() runs to the end.
        self.spawn_amount = reality.world.get_total_food()
|
print 'ticks: %d, food_discovered: %d' % (self.ticks, stats.food_discovered)
return tick_changed_items, False, stats.last_route
|
<|file_name|>scan_settings.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from math import pi, atan
class ScanSetting(object):
    """Container for 3D-scanner calibration and reconstruction settings.

    Fix: the original block was scrambled by extraction markers; the laser
    range settings have been restored to their intended position (right after
    MAXLaserRange) and the stray marker tokens removed.
    """
    def __init__(self):
        super(ScanSetting, self).__init__()
        # for scan
        self.scan_step = 400  # steps
        self.theta_a = pi / 6  # angle between center and laser (initial guess)
        self.img_width = 640
        self.img_height = 480
        self.sensorWidth = 3.67
        self.sensorHeight = 2.74 + 0.08
        self.focalLength = 3.6
        # ######### mockup 2, measure by solidwork###
        self.cab_m = self.img_width / 2
        self.cab_l = self.img_width / 2
        self.cab_r = self.img_width / 2
        self.cameraX = 0.0
        self.cameraY = 22.28 + 8
        self.cameraZ = -174.70
        self.laserX_L = -53.61
        self.laserY_L = 31.62
        self.laserZ_L = -76.47
        self.laserX_R = 53.61
        self.laserY_R = 31.62
        self.laserZ_R = -76.47
        # recompute theta_a from the measured laser geometry (overrides pi/6)
        self.theta_a = atan(self.laserX_L / self.laserZ_L)
        self.MAXLaserRange = 65
        self.LaserRangeMergeDistance = 65
        self.MINLaserRange = 3
        self.MagnitudeThreshold = 3
        self.LLaserAdjustment = 0
        self.RLaserAdjustment = 0
        # for modeling
        self.NoiseNeighbors = 50
        self.NeighborhoodDistance = 10
        self.SegmentationDistance = 2
        self.CloseBottom = -1000
        self.CloseTop = 1000
<|file_name|>htmllinkelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListBinding::DOMTokenListMethods;
use crate::dom::bindings::codegen::Bindings::HTMLLinkElementBinding::HTMLLinkElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::cssstylesheet::CSSStyleSheet;
use crate::dom::document::Document;
use crate::dom::domtokenlist::DOMTokenList;
use crate::dom::element::{
cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::element::{AttributeMutation, Element, ElementCreator};
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{
document_from_node, stylesheets_owner_from_node, window_from_node, BindContext, Node,
UnbindContext,
};
use crate::dom::stylesheet::StyleSheet as DOMStyleSheet;
use crate::dom::virtualmethods::VirtualMethods;
use crate::stylesheet_loader::{StylesheetContextSource, StylesheetLoader, StylesheetOwner};
use cssparser::{Parser as CssParser, ParserInput};
use dom_struct::dom_struct;
use embedder_traits::EmbedderMsg;
use html5ever::{LocalName, Prefix};
use net_traits::ReferrerPolicy;
use servo_arc::Arc;
use servo_atoms::Atom;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use style::attr::AttrValue;
use style::media_queries::MediaList;
use style::parser::ParserContext as CssParserContext;
use style::str::HTML_SPACE_CHARACTERS;
use style::stylesheets::{CssRuleType, Stylesheet};
use style_traits::ParsingMode;
/// Monotonically increasing counter used to tell successive stylesheet
/// fetch requests from the same `<link>` element apart.
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub struct RequestGenerationId(u32);

impl RequestGenerationId {
    /// Returns the id for the next request generation.
    fn increment(self) -> RequestGenerationId {
        Self(self.0 + 1)
    }
}
#[dom_struct]
pub struct HTMLLinkElement {
htmlelement: HTMLElement,
rel_list: MutNullableDom<DOMTokenList>,
#[ignore_malloc_size_of = "Arc"]
stylesheet: DomRefCell<Option<Arc<Stylesheet>>>,
cssom_stylesheet: MutNullableDom<CSSStyleSheet>,
/// <https://html.spec.whatwg.org/multipage/#a-style-sheet-that-is-blocking-scripts>
parser_inserted: Cell<bool>,
/// The number of loads that this link element has triggered (could be more
/// than one because of imports) and have not yet finished.
pending_loads: Cell<u32>,
/// Whether any of the loads have failed.
any_failed_load: Cell<bool>,
/// A monotonically increasing counter that keeps track of which stylesheet to apply.
request_generation_id: Cell<RequestGenerationId>,
}
impl HTMLLinkElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> HTMLLinkElement {
HTMLLinkElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
rel_list: Default::default(),
parser_inserted: Cell::new(creator.is_parser_created()),
stylesheet: DomRefCell::new(None),
cssom_stylesheet: MutNullableDom::new(None),
pending_loads: Cell::new(0),
any_failed_load: Cell::new(false),
request_generation_id: Cell::new(RequestGenerationId(0)),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> DomRoot<HTMLLinkElement> {
Node::reflect_node(
Box::new(HTMLLinkElement::new_inherited(
local_name, prefix, document, creator,
)),
document,
)
}
pub fn get_request_generation_id(&self) -> RequestGenerationId {
self.request_generation_id.get()
}
// FIXME(emilio): These methods are duplicated with
// HTMLStyleElement::set_stylesheet.
#[allow(unrooted_must_root)]
pub fn set_stylesheet(&self, s: Arc<Stylesheet>) {
let stylesheets_owner = stylesheets_owner_from_node(self);
if let Some(ref s) = *self.stylesheet.borrow() {
stylesheets_owner.remove_stylesheet(self.upcast(), s)
}
*self.stylesheet.borrow_mut() = Some(s.clone());
self.clean_stylesheet_ownership();
stylesheets_owner.add_stylesheet(self.upcast(), s);
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
pub fn get_cssom_stylesheet(&self) -> Option<DomRoot<CSSStyleSheet>> {
self.get_stylesheet().map(|sheet| {
self.cssom_stylesheet.or_init(|| {
CSSStyleSheet::new(
&window_from_node(self),
self.upcast::<Element>(),
"text/css".into(),
None, // todo handle location
None, // todo handle title
sheet,
)
})
})
}
pub fn is_alternate(&self) -> bool {
let rel = get_attr(self.upcast(), &local_name!("rel"));
match rel {
Some(ref value) => value
.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("alternate")),
None => false,
}
}
fn clean_stylesheet_ownership(&self) {
if let Some(cssom_stylesheet) = self.cssom_stylesheet.get() {
cssom_stylesheet.set_owner(None);
}
self.cssom_stylesheet.set(None);
}
}
/// Reads the attribute `local_name` (no namespace) from `element`,
/// returning its value as an owned `String` when present.
fn get_attr(element: &Element, local_name: &LocalName) -> Option<String> {
    element
        .get_attribute(&ns!(), local_name)
        .map(|attr| (**attr.value()).to_owned())
}
/// True when the space-separated `rel` value contains "stylesheet"
/// (ASCII case-insensitive).
fn string_is_stylesheet(value: &Option<String>) -> bool {
    value.as_deref().map_or(false, |rel| {
        rel.split(HTML_SPACE_CHARACTERS)
            .any(|token| token.eq_ignore_ascii_case("stylesheet"))
    })
}
/// Favicon spec usage in accordance with CEF implementation:
/// only url of icon is required/used
/// <https://html.spec.whatwg.org/multipage/#rel-icon>
fn is_favicon(value: &Option<String>) -> bool {
    value.as_deref().map_or(false, |rel| {
        rel.split(HTML_SPACE_CHARACTERS).any(|token| {
            token.eq_ignore_ascii_case("icon") || token.eq_ignore_ascii_case("apple-touch-icon")
        })
    })
}
impl VirtualMethods for HTMLLinkElement {
    fn super_type(&self) -> Option<&dyn VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
    }

    /// React to `href`/`sizes` mutations by (re)fetching the stylesheet or
    /// favicon the element now describes.
    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
        self.super_type().unwrap().attribute_mutated(attr, mutation);
        // Only act for connected elements and only on set/changed, not removal.
        if !self.upcast::<Node>().is_connected() || mutation.is_removal() {
            return;
        }
        let rel = get_attr(self.upcast(), &local_name!("rel"));
        match attr.local_name() {
            &local_name!("href") => {
                if string_is_stylesheet(&rel) {
                    self.handle_stylesheet_url(&attr.value());
                } else if is_favicon(&rel) {
                    let sizes = get_attr(self.upcast(), &local_name!("sizes"));
                    self.handle_favicon_url(rel.as_ref().unwrap(), &attr.value(), &sizes);
                }
            },
            &local_name!("sizes") => {
                // A sizes change only matters for icons that already have an href.
                if is_favicon(&rel) {
                    if let Some(ref href) = get_attr(self.upcast(), &local_name!("href")) {
                        self.handle_favicon_url(
                            rel.as_ref().unwrap(),
                            href,
                            &Some(attr.value().to_string()),
                        );
                    }
                }
            },
            _ => {},
        }
    }

    /// Keep `rel` stored as a token list so `RelList()` stays in sync.
    fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
        match name {
            &local_name!("rel") => AttrValue::from_serialized_tokenlist(value.into()),
            _ => self
                .super_type()
                .unwrap()
                .parse_plain_attribute(name, value),
        }
    }

    /// On insertion into a connected tree, kick off the stylesheet or favicon
    /// fetch described by the element's current attributes.
    fn bind_to_tree(&self, context: &BindContext) {
        if let Some(ref s) = self.super_type() {
            s.bind_to_tree(context);
        }
        if context.tree_connected {
            let element = self.upcast();
            let rel = get_attr(element, &local_name!("rel"));
            let href = get_attr(element, &local_name!("href"));
            let sizes = get_attr(self.upcast(), &local_name!("sizes"));
            match href {
                Some(ref href) if string_is_stylesheet(&rel) => {
                    self.handle_stylesheet_url(href);
                },
                Some(ref href) if is_favicon(&rel) => {
                    self.handle_favicon_url(rel.as_ref().unwrap(), href, &sizes);
                },
                _ => {},
            }
        }
    }

    /// On removal from the tree, detach and unregister any stylesheet this
    /// element owned so the document stops applying it.
    fn unbind_from_tree(&self, context: &UnbindContext) {
        if let Some(ref s) = self.super_type() {
            s.unbind_from_tree(context);
        }
        if let Some(s) = self.stylesheet.borrow_mut().take() {
            self.clean_stylesheet_ownership();
            stylesheets_owner_from_node(self).remove_stylesheet(self.upcast(), &s);
        }
    }
}
impl HTMLLinkElement {
    /// <https://html.spec.whatwg.org/multipage/#concept-link-obtain>
    fn handle_stylesheet_url(&self, href: &str) {
        let document = document_from_node(self);
        // Documents without a browsing context never fetch link resources.
        if document.browsing_context().is_none() {
            return;
        }
        // Step 1.
        if href.is_empty() {
            return;
        }
        // Step 2.
        let link_url = match document.base_url().join(href) {
            Ok(url) => url,
            Err(e) => {
                debug!("Parsing url {} failed: {}", href, e);
                return;
            },
        };
        let element = self.upcast::<Element>();
        // Step 3
        let cors_setting = cors_setting_for_element(element);
        // Parse the media query list from `media` ("" when the attribute is absent).
        let mq_attribute = element.get_attribute(&ns!(), &local_name!("media"));
        let value = mq_attribute.as_ref().map(|a| a.value());
        let mq_str = match value {
            Some(ref value) => &***value,
            None => "",
        };
        let mut input = ParserInput::new(&mq_str);
        let mut css_parser = CssParser::new(&mut input);
        let doc_url = document.url();
        let window = document.window();
        // FIXME(emilio): This looks somewhat fishy, since we use the context
        // only to parse the media query list, CssRuleType::Media doesn't make
        // much sense.
        let context = CssParserContext::new_for_cssom(
            &doc_url,
            Some(CssRuleType::Media),
            ParsingMode::DEFAULT,
            document.quirks_mode(),
            window.css_error_reporter(),
            None,
        );
        let media = MediaList::parse(&context, &mut css_parser);
        // Subresource-integrity metadata ("" when the attribute is absent).
        let im_attribute = element.get_attribute(&ns!(), &local_name!("integrity"));
        let integrity_val = im_attribute.as_ref().map(|a| a.value());
        let integrity_metadata = match integrity_val {
            Some(ref value) => &***value,
            None => "",
        };
        // Bump the generation id so responses for a superseded href are ignored.
        self.request_generation_id
            .set(self.request_generation_id.get().increment());
        // TODO: #8085 - Don't load external stylesheets if the node's mq
        // doesn't match.
        let loader = StylesheetLoader::for_element(self.upcast());
        loader.load(
            StylesheetContextSource::LinkElement { media: Some(media) },
            link_url,
            cors_setting,
            integrity_metadata.to_owned(),
        );
    }

    /// Resolve `href` against the document base URL and, for top-level
    /// windows, notify the embedder of the new favicon. `rel`/`sizes` are
    /// currently unused (only the URL matters to the embedder).
    fn handle_favicon_url(&self, _rel: &str, href: &str, _sizes: &Option<String>) {
        let document = document_from_node(self);
        match document.base_url().join(href) {
            Ok(url) => {
                let window = document.window();
                if window.is_top_level() {
                    let msg = EmbedderMsg::NewFavicon(url.clone());
                    window.send_to_embedder(msg);
                }
            },
            Err(e) => debug!("Parsing url {} failed: {}", href, e),
        }
    }
}
impl StylesheetOwner for HTMLLinkElement {
    /// Record that one more stylesheet load is in flight.
    fn increment_pending_loads_count(&self) {
        self.pending_loads.set(self.pending_loads.get() + 1)
    }

    /// Record a finished load. Returns `Some(any_failed)` once the last
    /// pending load completes, `None` while loads are still outstanding.
    fn load_finished(&self, succeeded: bool) -> Option<bool> {
        assert!(self.pending_loads.get() > 0, "What finished?");
        if !succeeded {
            self.any_failed_load.set(true);
        }
        self.pending_loads.set(self.pending_loads.get() - 1);
        if self.pending_loads.get() != 0 {
            return None;
        }
        let any_failed = self.any_failed_load.get();
        // Reset the failure flag for the next batch of loads.
        self.any_failed_load.set(false);
        Some(any_failed)
    }

    fn parser_inserted(&self) -> bool {
        self.parser_inserted.get()
    }

    /// `rel="noreferrer"` forces `ReferrerPolicy::NoReferrer` for the fetch.
    fn referrer_policy(&self) -> Option<ReferrerPolicy> {
        if self.RelList().Contains("noreferrer".into()) {
            return Some(ReferrerPolicy::NoReferrer);
        }
        None
    }

    /// Propagate origin-cleanliness to the CSSOM wrapper, if one exists.
    fn set_origin_clean(&self, origin_clean: bool) {
        if let Some(stylesheet) = self.get_cssom_stylesheet() {
            stylesheet.set_origin_clean(origin_clean);
        }
    }
}
impl HTMLLinkElementMethods for HTMLLinkElement {
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_url_getter!(Href, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_url_setter!(SetHref, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
make_getter!(Rel, "rel");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
fn SetRel(&self, rel: DOMString) {
self.upcast::<Element>()
.set_tokenlist_attribute(&local_name!("rel"), rel);
}
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_getter!(Media, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_setter!(SetMedia, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_getter!(Integrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_setter!(SetIntegrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_getter!(Hreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_setter!(SetHreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_getter!(Type, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_setter!(SetType, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-rellist
fn RelList(&self) -> DomRoot<DOMTokenList> {
self.rel_list.or_init(|| {
DOMTokenList::new(
self.upcast(),
&local_name!("rel"),
Some(vec![
Atom::from("alternate"),
Atom::from("apple-touch-icon"),
Atom::from("apple-touch-icon-precomposed"),
Atom::from("canonical"),
Atom::from("dns-prefetch"),
Atom::from("icon"),
Atom::from("import"),
Atom::from("manifest"),
Atom::from("modulepreload"),
Atom::from("next"),
Atom::from("preconnect"),
Atom::from("prefetch"),
Atom::from("preload"),
Atom::from("prerender"),
Atom::from("stylesheet"),
]),
)
})
}
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_getter!(Charset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_setter!(SetCharset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-rev
make_getter!(Rev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-link-rev
make_setter!(SetRev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_getter!(Target, "target");<|fim▁hole|>
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_setter!(SetTarget, "target");
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn GetCrossOrigin(&self) -> Option<DOMString> {
reflect_cross_origin_attribute(self.upcast::<Element>())
}
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn SetCrossOrigin(&self, value: Option<DOMString>) {
set_cross_origin_attribute(self.upcast::<Element>(), value);
}
// https://drafts.csswg.org/cssom/#dom-linkstyle-sheet
fn GetSheet(&self) -> Option<DomRoot<DOMStyleSheet>> {
self.get_cssom_stylesheet().map(DomRoot::upcast)
}
}<|fim▁end|>
| |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Field classes.
Includes all fields from `marshmallow.fields` in addition to a custom
`Nested` field and `DelimitedList`.
All fields can optionally take a special `location` keyword argument, which tells webargs
where to parse the request argument from. ::
args = {
'active': fields.Bool(location='query')
'content_type': fields.Str(load_from='Content-Type',
location='headers')
}
"""
import marshmallow as ma
from webargs.core import argmap2schema
# Public export surface; extended below with marshmallow's own field names.
# (A stray infill placeholder token fused onto the closing bracket made this
# statement unparseable; it has been removed.)
__all__ = [
    'Nested',
    'DelimitedList',
]
# We do this instead of 'from marshmallow.fields import *' because webargs
# has its own subclass of Nested
for _field_name in ma.fields.__all__:
    if _field_name == 'Nested':
        continue
    __all__.append(_field_name)
    globals()[_field_name] = getattr(ma.fields, _field_name)
class Nested(ma.fields.Nested):
    """Same as `marshmallow.fields.Nested`, except a plain dict may be passed
    as the first argument, in which case it is converted to a
    `marshmallow.Schema` first.
    """

    def __init__(self, nested, *args, **kwargs):
        schema = argmap2schema(nested) if isinstance(nested, dict) else nested
        super(Nested, self).__init__(schema, *args, **kwargs)
class DelimitedList(ma.fields.List):
    """Same as `marshmallow.fields.List`, except can load from either a list or
    a delimited string (e.g. "foo,bar,baz").

    :param Field cls_or_instance: A field class or instance.
    :param str delimiter: Delimiter between values.
    :param bool as_string: Dump values to string.
    """

    # default separator, used when the constructor gets no delimiter
    delimiter = ','

    def __init__(self, cls_or_instance, delimiter=None, as_string=False, **kwargs):
        self.delimiter = delimiter or self.delimiter
        self.as_string = as_string
        super(DelimitedList, self).__init__(cls_or_instance, **kwargs)

    def _serialize(self, value, attr, obj):
        # Serialize as a normal list first, then optionally flatten to a
        # delimited string. NOTE(review): the string form joins the *raw*
        # values rather than the serialized ones -- confirm that is intended
        # for non-string inner fields.
        ret = super(DelimitedList, self)._serialize(value, attr, obj)
        if self.as_string:
            return self.delimiter.join(format(each) for each in value)
        return ret

    def _deserialize(self, value, attr, data):
        try:
            ret = (
                value
                if ma.utils.is_iterable_but_not_string(value)
                else value.split(self.delimiter)
            )
        except AttributeError:
            # value was neither iterable nor a string: not a valid list input
            self.fail('invalid')
        return super(DelimitedList, self)._deserialize(ret, attr, data)
| |
<|file_name|>web-asset.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import {SvgIconProps} from '../../SvgIcon';<|fim▁hole|>
export default function WebAsset(props: SvgIconProps): React.ReactElement<SvgIconProps>;<|fim▁end|>
| |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>// Code generated by goa v3.6.1, DO NOT EDIT.
//
// calc service
//
// Command:
// $ goa gen goa.design/plugins/v3/cors/examples/calc/design -o
// $(GOPATH)/src/goa.design/plugins/cors/examples/calc
package calc
<|fim▁hole|>)
// The calc service exposes public endpoints that define CORS policy.
type Service interface {
	// Add adds up the two integer parameters and returns the results.
	Add(context.Context, *AddPayload) (res int, err error)
}

// ServiceName is the name of the service as defined in the design. This is the
// same value that is set in the endpoint request contexts under the ServiceKey
// key.
const ServiceName = "calc"

// MethodNames lists the service method names as defined in the design. These
// are the same values that are set in the endpoint request contexts under the
// MethodKey key.
var MethodNames = [1]string{"add"}

// AddPayload is the payload type of the calc service add method.
type AddPayload struct {
	// Left operand
	A int
	// Right operand
	B int
}
|
import (
"context"
|
<|file_name|>misc.py<|end_file_name|><|fim▁begin|>#
# Licensed under the GNU General Public License Version 3
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2013 Aron Parsons <[email protected]>
# Copyright (c) 2011--2015 Red Hat, Inc.
#
# NOTE: the 'self' variable is an instance of SpacewalkShell
# wildcard import
# pylint: disable=W0401,W0614
# unused argument
# pylint: disable=W0613
# invalid function name
# pylint: disable=C0103
import logging
import readline
import shlex
from getpass import getpass
from ConfigParser import NoOptionError
from spacecmd.utils import *
from time import sleep
import xmlrpclib
# list of system selection options for the help output
# (interpolated into the help text of every command that accepts <SYSTEMS>)
HELP_SYSTEM_OPTS = '''<SYSTEMS> can be any of the following:
name
ssm (see 'help ssm')
search:QUERY (see 'help system_search')
group:GROUP
channel:CHANNEL
'''
# Shared help text describing the date/time formats accepted by commands that
# schedule actions. An infill placeholder had swallowed the first half of the
# "Deltas" paragraph; the missing sentence is restored below.
HELP_TIME_OPTS = '''Dates can be any of the following:
Explicit Dates:
Dates can be expressed as explicit date strings in the YYYYMMDD[HHMM]
format. The year, month and day are required, while the hours and
minutes are not; the hours and minutes will default to 0000 if no
values are provided.
Deltas:
Dates can be expressed as delta values. For example, '2h' would
mean 2 hours in the future. You can also use negative values to
express times in the past (e.g., -7d would be one week ago).
Units:
s -> seconds
m -> minutes
h -> hours
d -> days
'''
####################
# life of caches in seconds
SYSTEM_CACHE_TTL = 3600
PACKAGE_CACHE_TTL = 86400
ERRATA_CACHE_TTL = 86400
# oldest server API version this client is willing to talk to
MINIMUM_API_VERSION = 10.8
# visual separator printed between multiple records
SEPARATOR = '\n' + '#' * 30 + '\n'
####################
# system entitlement labels understood by the server
ENTITLEMENTS = ['enterprise_entitled',
                'virtualization_host'
                ]
# fields accepted by the 'search:FIELD:VALUE' system selector
SYSTEM_SEARCH_FIELDS = ['id', 'name', 'ip', 'hostname',
                        'device', 'vendor', 'driver', 'uuid']
####################
def help_systems(self):
    """Print the shared help text for <SYSTEMS> selectors."""
    print HELP_SYSTEM_OPTS


def help_time(self):
    """Print the shared help text for date/time arguments."""
    print HELP_TIME_OPTS

####################


def help_clear(self):
    print 'clear: clear the screen'
    print 'usage: clear'


def do_clear(self, args):
    # delegate to the terminal's own clear command
    os.system('clear')

####################


def help_clear_caches(self):
    print 'clear_caches: Clear the internal caches kept for systems' + \
        ' and packages'
    print 'usage: clear_caches'


def do_clear_caches(self, args):
    """Drop the cached system, package and errata data so the next command
    regenerates them from the server."""
    self.clear_system_cache()
    self.clear_package_cache()
    self.clear_errata_cache()
####################
def help_get_apiversion(self):
    print 'get_apiversion: Display the API version of the server'
    print 'usage: get_apiversion'


def do_get_apiversion(self, args):
    # api.getVersion is an unauthenticated call
    print self.client.api.getVersion()

####################


def help_get_serverversion(self):
    print 'get_serverversion: Display the version of the server'
    print 'usage: get_serverversion'


def do_get_serverversion(self, args):
    print self.client.api.systemVersion()

####################


def help_get_certificateexpiration(self):
    print 'get_certificateexpiration: Print the expiration date of the'
    print " server's entitlement certificate"
    print 'usage: get_certificateexpiration'


def do_get_certificateexpiration(self, args):
    """Print when the server's entitlement certificate expires."""
    date = self.client.satellite.getCertificateExpirationDate(self.session)
    print date
####################
def help_list_proxies(self):
print 'list_proxies: List the proxies wihtin the user\'s organization '
print 'usage: list_proxies'
def do_list_proxies(self, args):
proxies = self.client.satellite.listProxies(self.session)
print proxies
####################
def help_get_session(self):
    print 'get_session: Show the current session string'
    print 'usage: get_session'


def do_get_session(self, args):
    """Print the cached session token, or log an error when not logged in."""
    if self.session:
        print self.session
    else:
        logging.error('No session found')

####################


def help_help(self):
    # the actual help dispatch is handled by cmd.Cmd itself
    print 'help: Show help for the given command'
    print 'usage: help COMMAND'

####################


def help_history(self):
    print 'history: List your command history'
    print 'usage: history'


def do_history(self, args):
    """Print the readline history with right-aligned 1-based indices."""
    for i in range(1, readline.get_current_history_length()):
        print '%s %s' % (str(i).rjust(4), readline.get_history_item(i))

####################


def help_toggle_confirmations(self):
    print 'toggle_confirmations: Toggle confirmation messages on/off'
    print 'usage: toggle_confirmations'


def do_toggle_confirmations(self, args):
    """Flip the --yes behaviour: when options.yes is set every confirmation
    is answered automatically, so clearing it re-enables the prompts."""
    if self.options.yes:
        self.options.yes = False
        print 'Confirmation messages are enabled'
    else:
        self.options.yes = True
        logging.warning('Confirmation messages are DISABLED!')
####################
def help_login(self):
    print 'login: Connect to a Spacewalk server'
    print 'usage: login [USERNAME] [SERVER]'


def do_login(self, args):
    """Establish an authenticated XML-RPC session with a Spacewalk server.

    Server and username are resolved from, in order: positional arguments,
    the per-server config section, then command-line options. A cached
    session token from ~/.spacecmd/<server>/session is reused when still
    valid; otherwise the user is prompted for credentials. Returns True on
    success, False otherwise.
    """
    (args, _options) = parse_arguments(args)
    # logout before logging in again
    if len(self.session):
        logging.warning('You are already logged in')
        return True
    # an argument passed to the function get precedence
    if len(args) == 2:
        server = args[1]
    else:
        # use the server we were already using
        server = self.config['server']
    # bail out if not server was given
    if not server:
        logging.warning('No server specified')
        return False
    # load the server-specific configuration
    self.load_config_section(server)
    # an argument passed to the function get precedence
    if len(args):
        username = args[0]
    elif self.config.has_key('username'):
        # use the username from before
        username = self.config['username']
    elif self.options.username:
        # use the username from before
        username = self.options.username
    else:
        username = ''
    # set the protocol
    if self.config.has_key('nossl') and self.config['nossl']:
        proto = 'http'
    else:
        proto = 'https'
    server_url = '%s://%s/rpc/api' % (proto, server)
    # this will enable spewing out all client/server traffic
    verbose_xmlrpc = False
    if self.options.debug > 1:
        verbose_xmlrpc = True
    # connect to the server
    logging.debug('Connecting to %s', server_url)
    self.client = xmlrpclib.Server(server_url, verbose=verbose_xmlrpc)
    # check the API to verify connectivity
    try:
        self.api_version = self.client.api.getVersion()
        logging.debug('Server API Version = %s', self.api_version)
    except xmlrpclib.Fault, e:
        if self.options.debug > 0:
            logging.exception(e)
        logging.error('Failed to connect to %s', server_url)
        self.client = None
        return False
    # ensure the server is recent enough
    if self.api_version < self.MINIMUM_API_VERSION:
        logging.error('API (%s) is too old (>= %s required)',
                      self.api_version, self.MINIMUM_API_VERSION)
        self.client = None
        return False
    # store the session file in the server's own directory
    session_file = os.path.join(self.conf_dir, server, 'session')
    # retrieve a cached session
    if os.path.isfile(session_file) and not self.options.password:
        try:
            sessionfile = open(session_file, 'r')
            # read the session (format = username:session)
            for line in sessionfile:
                parts = line.split(':')
                # if a username was passed, make sure it matches
                if len(username):
                    if parts[0] == username:
                        self.session = parts[1]
                else:
                    # get the username from the cache if one
                    # wasn't passed by the user
                    username = parts[0]
                    self.session = parts[1]
            sessionfile.close()
        except IOError:
            logging.error('Could not read %s', session_file)
    # check the cached credentials by doing an API call
    if self.session:
        try:
            logging.debug('Using cached credentials from %s', session_file)
            self.client.user.listAssignableRoles(self.session)
        except xmlrpclib.Fault:
            logging.warning('Cached credentials are invalid')
            self.current_user = ''
            self.session = ''
    # attempt to login if we don't have a valid session yet
    if not len(self.session):
        if len(username):
            logging.info('Spacewalk Username: %s', username)
        else:
            username = prompt_user('Spacewalk Username:', noblank=True)
        if self.options.password:
            password = self.options.password
            # remove this from the options so that if 'login' is called
            # again, the user is prompted for the information
            self.options.password = None
        elif self.config.has_key('password'):
            password = self.config['password']
        else:
            password = getpass('Spacewalk Password: ')
        # login to the server
        try:
            self.session = self.client.auth.login(username, password)
            # don't keep the password around
            password = None
        except xmlrpclib.Fault:
            logging.error('Invalid credentials')
            return False
        try:
            # make sure ~/.spacecmd/<server> exists
            conf_dir = os.path.join(self.conf_dir, server)
            if not os.path.isdir(conf_dir):
                os.mkdir(conf_dir, 0700)
            # add the new cache to the file
            line = '%s:%s\n' % (username, self.session)
            # write the new cache file out
            sessionfile = open(session_file, 'w')
            sessionfile.write(line)
            sessionfile.close()
        except IOError:
            logging.error('Could not write session file')
    # load the system/package/errata caches
    self.load_caches(server)
    # keep track of who we are and who we're connected to
    self.current_user = username
    self.server = server
    logging.info('Connected to %s as %s', server_url, username)
    return True
####################
def help_logout(self):
    print 'logout: Disconnect from the server'
    print 'usage: logout'


def do_logout(self, args):
    """Invalidate the server session (if any) and forget all cached state."""
    if self.session:
        self.client.auth.logout(self.session)
    self.session = ''
    self.current_user = ''
    self.server = ''
    self.do_clear_caches('')

####################


def help_whoami(self):
    print 'whoami: Print the name of the currently logged in user'
    print 'usage: whoami'


def do_whoami(self, args):
    if len(self.current_user):
        print self.current_user
    else:
        logging.warning("You are not logged in")

####################


def help_whoamitalkingto(self):
    print 'whoamitalkingto: Print the name of the server'
    print 'usage: whoamitalkingto'


def do_whoamitalkingto(self, args):
    if len(self.server):
        print self.server
    else:
        logging.warning('Yourself')
####################
def tab_complete_errata(self, text):
    """Tab completion for erratum arguments: every cached advisory name
    plus the special 'search:' selector prefix."""
    candidates = self.do_errata_list('', True) + ['search:']
    return tab_completer(candidates, text)
def tab_complete_systems(self, text):
    """Tab completion for <SYSTEMS> arguments; understands the special
    'group:', 'channel:' and 'search:' selector prefixes."""
    if re.match('group:', text):
        # prepend 'group' to each item for tab completion
        groups = ['group:%s' % g for g in self.do_group_list('', True)]
        return tab_completer(groups, text)
    elif re.match('channel:', text):
        # prepend 'channel' to each item for tab completion
        channels = ['channel:%s' % s
                    for s in self.do_softwarechannel_list('', True)]
        return tab_completer(channels, text)
    elif re.match('search:', text):
        # prepend 'search' to each item for tab completion
        fields = ['search:%s:' % f for f in self.SYSTEM_SEARCH_FIELDS]
        return tab_completer(fields, text)
    else:
        options = self.get_system_names()
        # add our special search options
        options.extend(['group:', 'channel:', 'search:'])
        return tab_completer(options, text)
def remove_last_history_item(self):
    """Drop the most recent readline history entry, if there is one."""
    last_index = readline.get_current_history_length() - 1
    if last_index < 0:
        return
    readline.remove_history_item(last_index)
def clear_errata_cache(self):
    """Empty the in-memory errata cache, expire it immediately and persist
    the (now empty) cache to disk."""
    self.all_errata = {}
    self.errata_cache_expire = datetime.now()
    self.save_errata_cache()
def get_errata_names(self):
    """Return the cached advisory names, sorted alphabetically.

    self.all_errata maps advisory name -> details dict, so the names are
    the keys themselves. The previous code iterated the dict (yielding
    the string keys) and called .get() on each one, which raised
    AttributeError as soon as the cache was non-empty.
    """
    return sorted(self.all_errata.keys())
def get_erratum_id(self, name):
    """Return the numeric ID for an advisory name, or None when uncached."""
    details = self.all_errata.get(name)
    if details is None:
        return None
    return details['id']
def get_erratum_name(self, erratum_id):
    """Reverse lookup: find the advisory name whose cached ID matches, or
    None when no cached erratum has that ID."""
    for advisory_name, details in self.all_errata.items():
        if details['id'] == erratum_id:
            return advisory_name
    return None
def generate_errata_cache(self, force=False):
    """Populate self.all_errata from every accessible software channel.

    A no-op until the cache TTL expires, unless force is True. Channels
    the user cannot read are skipped silently (debug-logged).
    """
    if not force and datetime.now() < self.errata_cache_expire:
        return
    if not self.options.quiet:
        # tell the user what's going on
        self.replace_line_buffer('** Generating errata cache **')
    channels = self.client.channel.listSoftwareChannels(self.session)
    channels = [c.get('label') for c in channels]
    for c in channels:
        try:
            errata = \
                self.client.channel.software.listErrata(self.session, c)
        except xmlrpclib.Fault:
            logging.debug('No access to %s', c)
            continue
        for erratum in errata:
            if erratum.get('advisory_name') not in self.all_errata:
                self.all_errata[erratum.get('advisory_name')] = \
                    {'id': erratum.get('id'),
                     'advisory_name': erratum.get('advisory_name'),
                     'advisory_type': erratum.get('advisory_type'),
                     'date': erratum.get('date'),
                     'advisory_synopsis': erratum.get('advisory_synopsis')}
    # bug fix: the TTL is in seconds, but it was passed as timedelta's
    # first positional argument, which is *days* -- now consistent with
    # the package and system caches
    self.errata_cache_expire = \
        datetime.now() + timedelta(seconds=self.ERRATA_CACHE_TTL)
    self.save_errata_cache()
    if not self.options.quiet:
        # restore the original line buffer
        self.replace_line_buffer()
def save_errata_cache(self):
    """Persist the errata cache and its expiry time to disk."""
    save_cache(self.errata_cache_file,
               self.all_errata,
               self.errata_cache_expire)


def clear_package_cache(self):
    """Empty all three package caches, expire them immediately and write
    the empty caches to disk."""
    self.all_packages_short = {}
    self.all_packages = {}
    self.all_packages_by_id = {}
    self.package_cache_expire = datetime.now()
    self.save_package_caches()
def generate_package_cache(self, force=False):
    """Build the three package caches (short names, long name -> IDs, and
    ID -> long name) from every accessible channel. A no-op until the
    cache TTL expires, unless force is True."""
    if not force and datetime.now() < self.package_cache_expire:
        return
    if not self.options.quiet:
        # tell the user what's going on
        self.replace_line_buffer('** Generating package cache **')
    channels = self.client.channel.listSoftwareChannels(self.session)
    channels = [c.get('label') for c in channels]
    for c in channels:
        try:
            packages = \
                self.client.channel.software.listAllPackages(self.session, c)
        except xmlrpclib.Fault:
            logging.debug('No access to %s', c)
            continue
        for p in packages:
            if not p.get('name') in self.all_packages_short:
                self.all_packages_short[p.get('name')] = ''
            longname = build_package_names(p)
            if not longname in self.all_packages:
                self.all_packages[longname] = [p.get('id')]
            else:
                # the same NVREA can exist in several channels with
                # distinct package IDs
                self.all_packages[longname].append(p.get('id'))
    # keep a reverse dictionary so we can lookup package names by ID
    self.all_packages_by_id = {}
    for (k, v) in self.all_packages.iteritems():
        for i in v:
            self.all_packages_by_id[i] = k
    self.package_cache_expire = \
        datetime.now() + timedelta(seconds=self.PACKAGE_CACHE_TTL)
    self.save_package_caches()
    if not self.options.quiet:
        # restore the original line buffer
        self.replace_line_buffer()
def save_package_caches(self):
    # store the cache to disk to speed things up
    save_cache(self.packages_short_cache_file,
               self.all_packages_short,
               self.package_cache_expire)
    save_cache(self.packages_long_cache_file,
               self.all_packages,
               self.package_cache_expire)
    save_cache(self.packages_by_id_cache_file,
               self.all_packages_by_id,
               self.package_cache_expire)


# create a global list of all available package names
def get_package_names(self, longnames=False):
    """Return all known package names, refreshing the cache when stale.

    NOTE(review): with longnames=True this returns a list of
    name-version strings, otherwise it returns the short-name *dict*
    itself -- callers appear to only iterate the result, but the
    asymmetric return type is surprising; confirm before changing.
    """
    self.generate_package_cache()
    if longnames:
        return self.all_packages.keys()
    else:
        return self.all_packages_short
def get_package_id(self, name):
    """Return the set of package IDs for a package long name, or None when
    the name is not in the cache."""
    self.generate_package_cache()
    ids = self.all_packages.get(name)
    if ids is None:
        return None
    return set(ids)
def get_package_name(self, package_id):
    """Map a package ID back to its long name; None when the ID is unknown."""
    self.generate_package_cache()
    return self.all_packages_by_id.get(package_id)
def clear_system_cache(self):
    """Empty the in-memory system cache, expire it and persist it."""
    self.all_systems = {}
    self.system_cache_expire = datetime.now()
    self.save_system_cache()


def generate_system_cache(self, force=False, delay=0):
    """Refresh the ID -> name map of all systems from the server.

    delay: seconds to sleep first; used after deletes, which the server
    processes asynchronously.
    """
    if not force and datetime.now() < self.system_cache_expire:
        return
    if not self.options.quiet:
        # tell the user what's going on
        self.replace_line_buffer('** Generating system cache **')
    # we might need to wait for some systems to delete
    if delay:
        sleep(delay)
    systems = self.client.system.listSystems(self.session)
    self.all_systems = {}
    for s in systems:
        self.all_systems[s.get('id')] = s.get('name')
    self.system_cache_expire = \
        datetime.now() + timedelta(seconds=self.SYSTEM_CACHE_TTL)
    self.save_system_cache()
    if not self.options.quiet:
        # restore the original line buffer
        self.replace_line_buffer()
def save_system_cache(self):
    """Persist the system cache and its expiry time to disk."""
    save_cache(self.system_cache_file,
               self.all_systems,
               self.system_cache_expire)


def load_caches(self, server):
    """Load the per-server SSM, system, errata and package caches from
    ~/.spacecmd/<server>/, creating the directory when needed."""
    conf_dir = os.path.join(self.conf_dir, server)
    try:
        if not os.path.isdir(conf_dir):
            os.mkdir(conf_dir, 0700)
    except OSError:
        logging.error('Could not create directory %s', conf_dir)
        return
    self.ssm_cache_file = os.path.join(conf_dir, 'ssm')
    self.system_cache_file = os.path.join(conf_dir, 'systems')
    self.errata_cache_file = os.path.join(conf_dir, 'errata')
    self.packages_long_cache_file = os.path.join(conf_dir, 'packages_long')
    self.packages_by_id_cache_file = \
        os.path.join(conf_dir, 'packages_by_id')
    self.packages_short_cache_file = \
        os.path.join(conf_dir, 'packages_short')
    # load self.ssm from disk
    (self.ssm, _ignore) = load_cache(self.ssm_cache_file)
    # update the prompt now that we loaded the SSM
    self.postcmd(False, '')
    # load self.all_systems from disk
    (self.all_systems, self.system_cache_expire) = \
        load_cache(self.system_cache_file)
    # load self.all_errata from disk
    (self.all_errata, self.errata_cache_expire) = \
        load_cache(self.errata_cache_file)
    # load self.all_packages_short from disk
    (self.all_packages_short, self.package_cache_expire) = \
        load_cache(self.packages_short_cache_file)
    # load self.all_packages from disk
    (self.all_packages, self.package_cache_expire) = \
        load_cache(self.packages_long_cache_file)
    # load self.all_packages_by_id from disk
    (self.all_packages_by_id, self.package_cache_expire) = \
        load_cache(self.packages_by_id_cache_file)
def get_system_names(self):
    """Return the profile names of all known systems, refreshing the
    system cache first when it is stale."""
    self.generate_system_cache()
    return list(self.all_systems.values())
# check for duplicate system names and return the system ID
def get_system_id(self, name):
    """Resolve a system name or numeric ID to a unique system ID.

    Returns 0 when the name is unknown or ambiguous (duplicate profile
    names); duplicates are logged so the user can resolve them by ID.
    """
    self.generate_system_cache()
    try:
        # check if we were passed a system instead of a name
        system_id = int(name)
        if system_id in self.all_systems:
            return system_id
    except ValueError:
        pass
    # get a set of matching systems to check for duplicate names
    systems = []
    for system_id in self.all_systems:
        if name == self.all_systems[system_id]:
            systems.append(system_id)
    if len(systems) == 1:
        return systems[0]
    elif not len(systems):
        logging.warning("Can't find system ID for %s", name)
        return 0
    else:
        logging.warning('Duplicate system profile names found!')
        logging.warning("Please reference systems by ID or resolve the")
        logging.warning("underlying issue with 'system_delete' or 'system_rename'")
        id_list = '%s = ' % name
        for system_id in systems:
            id_list = id_list + '%i, ' % system_id
        logging.warning('')
        # strip the trailing ', ' from the assembled list
        logging.warning(id_list[:-2])
        return 0
def get_system_name(self, system_id):
    """Map a system ID to its profile name; None when the ID is unknown."""
    self.generate_system_cache()
    return self.all_systems.get(system_id)
def get_org_id(self, name):
    """Look up the numeric ID of an organization by name."""
    details = self.client.org.getDetails(self.session, name)
    return details.get('id')


def expand_errata(self, args):
    """Expand erratum names and 'search:QUERY' selectors into the matching
    advisory names from the errata cache."""
    if not isinstance(args, list):
        args = args.split()
    self.generate_errata_cache()
    if len(args) == 0:
        # no selectors means everything in the cache
        return self.all_errata
    errata = []
    for item in args:
        if re.match('search:', item):
            item = re.sub('search:', '', item)
            errata.extend(self.do_errata_search(item, True))
        else:
            errata.append(item)
    matches = filter_results(self.all_errata, errata)
    return matches
def expand_systems(self, args):
    """Expand <SYSTEMS> selectors (names, IDs, 'ssm', 'group:', 'search:'
    and 'channel:' prefixes) into a de-duplicated list of system names
    and numeric IDs."""
    if not isinstance(args, list):
        args = shlex.split(args)
    systems = []
    system_ids = []
    for item in args:
        if re.match('ssm', item, re.I):
            systems.extend(self.ssm)
        elif re.match('group:', item):
            item = re.sub('group:', '', item)
            members = self.do_group_listsystems("'%s'" % item, True)
            if len(members):
                # escape the names so they match literally below
                systems.extend([re.escape(m) for m in members])
            else:
                logging.warning('No systems in group %s', item)
        elif re.match('search:', item):
            query = item.split(':', 1)[1]
            results = self.do_system_search(query, True)
            if len(results):
                systems.extend([re.escape(r) for r in results])
        elif re.match('channel:', item):
            item = re.sub('channel:', '', item)
            members = self.do_softwarechannel_listsystems(item, True)
            if len(members):
                systems.extend([re.escape(m) for m in members])
            else:
                logging.warning('No systems subscribed to %s', item)
        else:
            # translate system IDs that the user passes
            try:
                sys_id = int(item)
                system_ids.append(sys_id)
            except ValueError:
                # just a system name
                systems.append(item)
    matches = filter_results(self.get_system_names(), systems)
    return list(set(matches + system_ids))
def list_base_channels(self):
    """Return the labels of all base (parent-less) software channels."""
    channels = self.client.channel.listSoftwareChannels(self.session)
    return [c.get('label') for c in channels if not c.get('parent_label')]
def list_child_channels(self, system=None, parent=None, subscribed=False):
    """List child-channel labels, selected one of three ways: for a system
    (subscribable, or currently-subscribed when subscribed=True), for an
    explicit parent channel, or every child channel known."""
    channels = []
    if system:
        system_id = self.get_system_id(system)
        if not system_id:
            # unknown/ambiguous system: nothing to list
            return
        if subscribed:
            channels = \
                self.client.system.listSubscribedChildChannels(self.session,
                                                               system_id)
        else:
            channels = self.client.system.listSubscribableChildChannels(
                self.session, system_id)
    elif parent:
        all_channels = \
            self.client.channel.listSoftwareChannels(self.session)
        for c in all_channels:
            if parent == c.get('parent_label'):
                channels.append(c)
    else:
        # get all channels that have a parent
        all_channels = \
            self.client.channel.listSoftwareChannels(self.session)
        for c in all_channels:
            if c.get('parent_label'):
                channels.append(c)
    return [c.get('label') for c in channels]
def user_confirm(self, prompt='Is this ok [y/N]:', nospacer=False,
integer=False, ignore_yes=False):
if self.options.yes and not ignore_yes:
return True
if nospacer:
answer = prompt_user('%s' % prompt)
else:
answer = prompt_user('\n%s' % prompt)
if re.match('y', answer, re.I):
if integer:
return 1
else:
return True
else:
if integer:
return 0
else:
return False
# check if the available API is recent enough
def check_api_version(self, want):
want_parts = [int(i) for i in want.split('.')]
have_parts = [int(i) for i in self.api_version.split('.')]
if len(have_parts) == 2 and len(want_parts) == 2:
if have_parts[0] == want_parts[0]:
# compare minor versions if majors are the same
return have_parts[1] >= want_parts[1]
else:
# only compare major versions if they differ
return have_parts[0] >= want_parts[0]
else:
# compare the whole value
return float(self.api_version) >= float(want)
# replace the current line buffer
def replace_line_buffer(self, msg=None):
# restore the old buffer if we weren't given a new line
if not msg:
msg = readline.get_line_buffer()
# don't print a prompt if there wasn't one to begin with
if len(readline.get_line_buffer()):
new_line = '%s%s' % (self.prompt, msg)
else:
new_line = '%s' % msg
# clear the current line
self.stdout.write('\r'.ljust(len(self.current_line) + 1))
self.stdout.flush()
# write the new line
self.stdout.write('\r%s' % new_line)
self.stdout.flush()
# keep track of what is displayed so we can clear it later
self.current_line = new_line
def load_config_section(self, section):
config_opts = ['server', 'username', 'password', 'nossl']
if not self.config_parser.has_section(section):
logging.debug('Configuration section [%s] does not exist', section)
return
logging.debug('Loading configuration section [%s]', section)
for key in config_opts:
# don't override command-line options
if self.options.__dict__[key]:
# set the config value to the command-line argument
self.config[key] = self.options.__dict__[key]
else:
try:
self.config[key] = self.config_parser.get(section, key)
except NoOptionError:
pass
# handle the nossl boolean
if self.config.has_key('nossl') and isinstance(self.config['nossl'], str):
if re.match('^1|y|true$', self.config['nossl'], re.I):
self.config['nossl'] = True
else:
self.config['nossl'] = False
# Obfuscate the password with asterisks
config_debug = self.config.copy()
if config_debug.has_key('password'):
config_debug['password'] = "*" * len(config_debug['password'])
logging.debug('Current Configuration: %s', config_debug)<|fim▁end|>
|
Dates can be expressed as delta values. For example, '2h' would
|
<|file_name|>regepe_flask_server.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from flask import Flask,render_template,send_file,Response,flash,request,redirect,session
from werkzeug.utils import secure_filename
import json
import os.path
import os
import gzip
import urllib
from db import DbGetListOfDates,DbGet,DbGetComments,DbGetMulitple,DbGetNearbyPoints,DbPut,DbPutWithoutPassword,DbSearchWord,DbGetMapsOfUser,DbGetAllMaps,DbAddComment,CheckValidMapId,CheckValidFreetext,DbDelMap,DbChkPwd
import anydbm
import traceback
from progress import GetProgress,SetProgress
from users import CheckSession,Login,ActivateUser,SendActivationMail,ReserveUser,GetUserFromUserOrEmail,SendForgotPasswordMail
import sys
from orchestrator import BuildMap,ProcessTrkSegWithProgress,BuildMapFromTrack
from searchparser import SearchQueryParser
from sets import Set
from textutils import remove_accents
from log import Log
from mapparser import ParseMap
from model import Track
from options import options_default
from dem import GetEleFromLatLon
from computeprofile import ComputeProfile
from demize import Demize
from generate_id import uniqid
from config import keysnpwds, config
from flask_babel import Babel, gettext
from thumbnail import selectPointsForThumbnail, thumbnailUrlMapbox
# Create flask application
application = Flask(__name__)
application.config['UPLOAD_FOLDER'] = 'uploads'
application.secret_key = keysnpwds['secret_key']
## Internationalization (i18n)
babel = Babel(application)
LANGUAGES = {
'en': 'English',
'fr': 'Francais',
'es': 'Español'
}
@babel.localeselector
def get_locale():
    """Pick the UI language: explicit session choice first, then browser."""
    # Uncomment for testing a specific language
    #return 'es'
    #return 'fr'
    # Check if there is a lang in session.
    # Fixed: has_key() is a Python-2 dict-only method; 'in' is equivalent
    # and also works on Werkzeug's session proxy object.
    if 'lang' in session:
        return session['lang']
    # Else guess the lang from browser request
    return request.accept_languages.best_match(LANGUAGES.keys())
@application.route('/i18n.js/<item>')
def i18n_js(item):
""" Translation strings for javascript """
assert(item in ('header','map','prepare')) #basic security check
return render_template('i18n_%s.js'%item)
@application.route('/<lang>/testi18n.js')
def test_i18n_js(lang):
""" To test i18n for javascript because js escaping is not well handled by jinja2 """
session['lang']=lang
return '<html><head></head><body>Press Ctrl+Maj+K and check no errors in console<script>'+render_template('i18n_header.js')+render_template('i18n_map.js')+'</script>'
## Index page
@application.route('/',defaults={'lang':None,'limit':10})
@application.route('/indexall',defaults={'lang':None,'limit':-1})
@application.route('/<lang>/',defaults={'limit':10})
# fixed: this route used limit=10, inconsistent with '/indexall' (-1 = unlimited)
@application.route('/<lang>/indexall',defaults={'limit':-1})
def index(lang,limit):
    """Home page: list the most recent maps ('indexall' lists all of them)."""
    if lang!=None:
        session['lang']=lang
    maplist = DbGetListOfDates()
    cptr = 0
    mapsout = []
    # newest dates first; stop once the limit is exceeded (limit<0 = no limit)
    for date in sorted(maplist.iterkeys(),reverse=True):
        for mapid in maplist[date]:
            (lat,lon) = DbGet(mapid,'startpoint').split(',')
            trackdesc = DbGet(mapid,'trackdesc')
            trackuser = DbGet(mapid,'trackuser')
            desc = trackdesc.decode('utf8')
            mapsout.append({'mapid':mapid,'lat':lat,'lon':lon,'user':trackuser,'desc':desc,'date':date})
            cptr += 1
            if (limit>-1) and (cptr>limit):
                break
        if (limit>-1) and (cptr>limit):
            break
    return render_template('index.html',limit=limit,maps=mapsout,GMapsApiKey=keysnpwds['GMapsApiKey'])
## GPX Export
@application.route('/togpx/<mapid>')
def togpx(mapid):
    """Export a stored map's points as a minimal GPX 1.0 document."""
    # Read map data
    f = gzip.open('data/mapdata/%s.json.gz' % mapid, 'rb')
    mapdata = json.load(f)
    f.close()
    trkpts = ''.join('<trkpt lat="%f" lon="%f"></trkpt>' % (p[0], p[1])
                     for p in mapdata['points'])
    return ('<?xml version="1.0" encoding="UTF-8"?>\n'
            '<gpx version="1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/0" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">'
            '<trk><trkseg>' + trkpts + '</trkseg></trk></gpx>')
## Thumbnails
# ensure the thumbnail cache directory exists before any request is served
# (parent directory first)
for _dir in ('data', 'data/thumbnail_cache'):
    if not os.path.isdir(_dir):
        os.mkdir(_dir)
@application.route('/thumbnail/<mapid>')
@application.route('/thumbnail.php',defaults={'mapid':None})
def thumbnail(mapid):
    """Serve a PNG thumbnail for a map, generating and caching it on demand."""
    if mapid is None:
        # legacy /thumbnail.php?mapid=... form
        mapid = request.args.get('mapid')
    filename = 'data/thumbnail_cache/%s.png' % mapid
    if os.path.isfile(filename):
        # cache hit: serve the stored image
        return send_file(filename, mimetype='image/png')
    # cache miss: build a static-map URL from a subset of the track points
    ptlist = selectPointsForThumbnail(mapid)
    url = thumbnailUrlMapbox(ptlist)
    # keep the URL on disk for debugging / regeneration
    furl = open('data/thumbnail_cache/%s.url' % mapid, 'w')
    furl.write(url)
    furl.close()
    # download the PNG, store it in the cache and return it
    f = urllib.urlopen(url)
    contents = f.read()
    f.close()
    fcache = open(filename, 'wb')
    fcache.write(contents)
    fcache.close()
    return contents
## Show map
@application.route('/<lang>/showmap/<mapid>', defaults={'map_type': None})
@application.route('/<lang>/showmap/<mapid>/<map_type>')
@application.route('/<lang>/showmap-flot.php',defaults={'mapid':None,'map_type': None})
@application.route('/<lang>/showmap.php',defaults={'mapid':None,'map_type': None})
@application.route('/showmap/<mapid>', defaults={'lang':None,'map_type': None})
@application.route('/showmap/<mapid>/<map_type>',defaults={'lang':None})
@application.route('/showmap-flot.php',defaults={'lang':None,'mapid':None,'map_type': None})
@application.route('/showmap.php',defaults={'lang':None,'mapid':None,'map_type': None})
def showmap(lang,mapid,map_type):
if lang!=None:
session['lang']=lang
if mapid==None:
mapid=request.args.get('mapid')
# Read map data
f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
mapdata=json.load(f)
f.close()
# Read map db
mapdb = anydbm.open('data/maps/%s.db'%mapid, 'r')
if map_type==None:
map_type = mapdata['type']
# Render
_mapdb={}
for key in mapdb:
_mapdb[key] = mapdb[key].decode('utf-8') # We must convert each utf8 string into unicode for jinja2
out = render_template('showmap.html',domain=config['domain'],mapid=mapid,type=map_type,mapdb=_mapdb,mapdata=mapdata,GMapsApiKey=keysnpwds['GMapsApiKey'],GeoPortalApiKey=keysnpwds['GeoPortalApiKey'])
mapdb.close()
return out
@application.route('/mapdata/<mapid>')
def mapdata(mapid):
# Read map data
f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
mapfromfile=json.load(f)
f.close()
return Response(render_template('mapdata.js',mapdata=mapfromfile,chartdata=json.dumps(mapfromfile['chartdata'])), mimetype='text/javascript')
@application.route('/comments/<mapid>')
def comments(mapid):
    """Return a map's comments as a small XML document."""
    entries = DbGetComments(mapid)
    # each comment tuple is (date, user, text)
    body = ''.join('<comment user="%s" date="%s">%s</comment>' % (c[1], c[0], c[2])
                   for c in entries)
    return Response('<?xml version="1.0" encoding="UTF-8"?><result>%s</result>' % body, mimetype='text/xml')
@application.route('/sendcomment/<mapid>/<comment>')
def sendcomment(mapid,comment):
try:
user = 'unknown'
if request.form.has_key('user'):
user = request.form.getvalue('user')
if not CheckValidUserName(user):
raise Exception('Invalid user name')
sess = request.form.getvalue('sess')
if CheckSession(user,sess):
pass
else:
raise Exception(gettext('Invalid session, please re-login'))
else:
user = request.remote_addr
if not CheckValidMapId(mapid):
raise Exception(gettext('Invalid map id'))
if not CheckValidFreetext(comment):
raise Exception(gettext('Invalid map id'))
DbAddComment(mapid,user,comment)
result = 'OK'
except Exception, e:
result = str(e)
out = '<?xml version="1.0" encoding="UTF-8"?>\n<result>%s</result>'%result
return Response(out, mimetype='text/xml')
@application.route('/nearmaps/<mapid>')
def nearmaps(mapid):
    """Return a JSON object describing maps starting near this map's start."""
    lat, lon = map(float, DbGet(mapid,'startpoint').split(','))
    entries = []
    for _mapid in DbGetNearbyPoints(lat, lon):
        if _mapid == mapid:
            continue  # never list the map itself
        info = DbGetMulitple(_mapid, ('startpoint','trackdesc','trackuser','date'))
        entries.append('"%s":%s' % (_mapid, json.dumps(info)))
    return '{' + ','.join(entries) + '}'
@application.route('/dbget/<mapid>/<element>')
def dbget(mapid,element):
try:
val = DbGet(mapid,element.encode('ascii'))
message = 'OK'
except Exception, e:
message = 'Error: ' + str(e)+'\n'+traceback.format_exc()
val = 'Error'
out = '<?xml version="1.0" encoding="UTF-8"?>\n<answer><message>%s</message><pageelementid>%s</pageelementid><value>%s</value></answer>' % (message,element,val)
return Response(out, mimetype='text/xml')
@application.route('/dbput/<mapid>/<pwd>/<ele>/<val>',defaults={'user':None,'sess':-1})
@application.route('/dbput/<mapid>/<pwd>/<ele>/<val>/<user>/<sess>')
def dbput(mapid,pwd,ele,val,user,sess,defaults={'user': None,'sess': -1}):
try:
if user!=None and sess!=-1:
if CheckSession(user,sess):
map_user = DbGet(mapid,'trackuser')
if len(map_user)>0 and map_user==user:
DbPutWithoutPassword(mapid,ele.encode('ascii'),val.encode('utf8'))
message = 'OK'
else:
raise Exception(gettext('Map %s does not belong to user %s, but to user %s') % (mapid,user,map_user))
else:
raise Exception(gettext('Invalid session, please re-login'))
else:
DbPut(mapid,pwd,ele.encode('ascii'),val.encode('utf8'))
message = 'OK'
except Exception, e:
message = 'Error: ' + str(e)
val = 'Error'
out = '<?xml version="1.0" encoding="UTF-8"?>\n<answer><message>%s</message><pageelementid>%s</pageelementid><value>%s</value></answer>' % (message,ele,val)
return Response(out, mimetype='text/xml')
## Send map
@application.route('/<lang>/submitform')
@application.route('/submitform',defaults={'lang':None})
def submitform(lang):
    """Render the track-upload form."""
    if lang is not None:
        session['lang'] = lang
    return render_template('submitform.html',GMapsApiKey=keysnpwds['GMapsApiKey'])
@application.route('/upload', methods=['POST'])
def upload():
# Get submit_id
submit_id = request.form['submit_id'].encode('ascii')
if not submit_id.isalnum():
return 'Bad submitid'
# Build inputfile array
inputfile = []
i=0
for file in request.files.getlist("file[]"):
# Save each uploaded file
if not os.path.isdir(application.config['UPLOAD_FOLDER']):
os.mkdir(application.config['UPLOAD_FOLDER'])
p=os.path.join(application.config['UPLOAD_FOLDER'], secure_filename('%s_%s.gpx'%(submit_id,i)))
Log('Saving file to %s'%p,submit_id)
file.save(p)
Log('File saved',submit_id)
i+=1
inputfile.append(file)
# In case of import from URL
if request.form.has_key('fromurl') and len(request.form['fromurl'])>0:
inputfile.append(request.form.get('fromurl').encode('ascii'))
if len(inputfile)<1:
return gettext('Error while uploading file')
# Track selection in case file contains several tracks
if request.form.has_key('trk_select'):
trk_id = int(request.form['trk_select'])
else:
trk_id = 0
trk_seg_id = 0
# Get track description
Log('Get track desc',submit_id)
desc = request.form['desc'].encode('utf8')
Log('Check session',submit_id)
# Check session
user = request.form['user']
#sys.stderr.write('%s\n'%(request.form))
if user=='NoUser' or user=='':
user = 'unknown'
else:
sess = request.form['sess']
if not CheckSession(user,sess):
user = 'unknown'
# Parse options (flat,wind,maptype,...)
options = options_default
for key in options:
if request.form.has_key(key):
if type(options[key])==bool:
if request.form.get(key):
options[key]=True
else:
options[key]=False
#options[key]=(request.form[key]=='yes')
elif type(options[key])==int:
options[key]=int(request.form[key])
elif type(options[key])==str or type(options[key])==unicode:
options[key]=request.form[key]
else:
raise Exception(gettext('type %s not handled')%type(options[key]))
Log('options=%s'%options,submit_id)
Log('start BuildMap',submit_id)
try:
pwd = BuildMap(inputfile,submit_id,trk_id,trk_seg_id,submit_id,desc,user,options)
except Exception,e:
Log(str(e))
SetProgress(submit_id,str(e))
return str(e)
Log('end BuildMap',submit_id)
return '''<script type="text/javascript">
var date = new Date();
date.setTime(date.getTime()+(10*24*60*60*1000));
var expires = "; expires="+date.toGMTString();
document.cookie = "pwd%(mapid)s=%(pwd)s"+expires+"; path=/";
location.href=\'/showmap/%(mapid)s\';
</script>'''% {'mapid':submit_id,'pwd':pwd}
@application.route('/getprogress/<submitid>')
def getprogress(submitid):
    """Return the current progress message for an ongoing upload."""
    progress = GetProgress(submitid.encode('ascii'))
    return progress.decode('utf8')
## Search
class MapSeach(SearchQueryParser):
def GetWord(self, word):
return Set(DbSearchWord('trackdesc',word))
def GetWordWildcard(self, word):
return Set()
def GetQuotes(self, search_string, tmp_result):
return Set()
def map_search_result(mapid):
try:
(lat,lon) = DbGet(mapid,'startpoint').split(',')
except:
(lat,lon)=(0.0,0.0)
trackdesc = DbGet(mapid,'trackdesc')
startdate = DbGet(mapid,'date')
trackuser = DbGet(mapid,'trackuser')
try:
desc = trackdesc.encode('ascii', 'xmlcharrefreplace')
except:
desc = trackdesc
desc = desc.replace('&','&')
return('<map mapid="%s" lat="%s" lon="%s" date="%s" user="%s">%s</map>' % (mapid,lat,lon,startdate,trackuser,desc))
@application.route('/search/<search_req>')
def search(search_req):
try:
req = remove_accents(search_req.encode('utf8').lower(),'utf-8')
mapids = MapSeach().Parse(req)
out='<result><maps>%s</maps></result>'%''.join(map(map_search_result,mapids))
except Exception, e:
out='<error>Error: %s</error>'%e
return Response(out, mimetype='text/xml')
## Show user
def map_retrieve_infos_showuser(mapid):
trackdesc = DbGet(mapid,'trackdesc').decode('utf8')
startdate = DbGet(mapid,'date')
return {'mapid':mapid,'desc':trackdesc,'date':startdate}
@application.route('/<lang>/showuser/<user>')
@application.route('/showuser/<user>',defaults={'lang':None})
def showuser(lang,user):
if lang!=None:
session['lang']=lang
mapids = DbGetMapsOfUser(user.encode('ascii'))
maps = map(map_retrieve_infos_showuser,mapids)
return render_template('showuser.html',user=user,maps=maps)
@application.route('/userinfo/<user>')
def userinfo(user):
mapids = DbGetMapsOfUser(user.encode('ascii'))
out = '<maps>%s</maps>'%''.join(map(map_search_result,mapids))
return Response(out, mimetype='text/xml')
## Browse maps
@application.route('/<lang>/mapofmaps')
@application.route('/mapofmaps',defaults={'lang':None})
def mapofmaps(lang):
    """Page showing all maps on a single overview map."""
    if lang is not None:
        session['lang'] = lang
    return render_template('mapofmaps.html',GMapsApiKey=keysnpwds['GMapsApiKey'])
def map_search_result2(lat,lon,mapid):
trackdesc = DbGet(mapid,'trackdesc')
startdate = DbGet(mapid,'date')
trackuser = DbGet(mapid,'trackuser')
try:
desc = trackdesc.encode('ascii', 'xmlcharrefreplace').replace('<','<').replace('>','>')
except:
desc = trackdesc
return('<map mapid="%s" lat="%s" lon="%s" date="%s" user="%s">%s</map>' % (mapid,lat,lon,startdate,trackuser,desc))
def latlonmapids2xml(latlonmapids):
lat,lon,mapids = latlonmapids
return '<maps lat="%.4f" lon="%.4f">%s</maps>' % (lat,lon,''.join(map(lambda mapid:map_search_result2(lat,lon,mapid),mapids)))
@application.route('/getmaplist')
def getmaplist():
latlonmapidss = DbGetAllMaps()
out = '<results>%s</results>' % ''.join(map(latlonmapids2xml,latlonmapidss))
return Response(out, mimetype='text/xml')
## Map Tools
def auth(mapid,pwd,user,sess):
    """Raise if the caller may not modify this map.

    With a user/session pair the session must be valid and the map must
    belong to that user; otherwise the map password (from the browser
    cookie) must check out.
    """
    if user!=None and sess!=None:
        if not CheckSession(user,sess):
            raise Exception(gettext('Invalid session, please re-login'))
        map_user = DbGet(mapid,'trackuser')
        if not (len(map_user)>0 and map_user==user):
            raise Exception(gettext('Map %s does not belong to user %s, but to user %s') % (mapid,user,map_user))
    else:
        if not DbChkPwd(mapid,pwd):
            raise Exception(gettext('You do not have the map\'s password in your browser\'s cookies'))
@application.route('/delmap/<mapid>/<pwd>',defaults={'user':None,'sess':None})
@application.route('/delmap/<mapid>/<pwd>/<user>/<sess>')
def delmap(mapid,pwd,user,sess):
try:
auth(mapid,pwd,user,sess)
# Delete map
DbDelMap(mapid)
mapfile = 'data/mapdata/%s.json.gz' % mapid
os.remove(mapfile)
message = gettext('Map deleted')
except Exception, e:
message = str(e)
return render_template('map_deleted.html',message=message)
def modifymap(mapid,pwd,user,sess,modifyfunction):
try:
# Authentificate
auth(mapid,pwd,user,sess)
# Parse map
options, ptlist = ParseMap(mapid)
# Apply modifications
ptlist,startpointchanged = modifyfunction(ptlist)
# Rebuild map
track = Track(ptlist)
ProcessTrkSegWithProgress(track,mapid,mapid,True,options)
# If start point has changed, then update the database
if startpointchanged:
DbPutWithoutPassword(mapid,'startpoint','%.4f,%.4f' % (track.ptlist[0].lat,track.ptlist[0].lon))
# Recompute thumbnail
previewfile = 'data/thumbnail_cache/%s.png' % mapid
if os.access(previewfile,os.F_OK):
os.remove(previewfile)
message = None
except Exception, e:
message = str(e)
if message==None:
return redirect('/showmap/%s'%mapid)
else:
return render_template('map_action_error.html',message=message,mapid=mapid)
@application.route('/map/crop/<mapid>/<pwd>/<int:pt1>/<int:pt2>',defaults={'user':None,'sess':None})
@application.route('/map/crop/<mapid>/<pwd>/<int:pt1>/<int:pt2>/<user>/<sess>')
def cropmap(mapid,pwd,pt1,pt2,user,sess):
return modifymap(mapid,pwd,user,sess,lambda ptlist: (ptlist[pt1:pt2],pt1!=0))
@application.route('/map/clear/<mapid>/<pwd>/<int:pt1>/<int:pt2>',defaults={'user':None,'sess':None})
@application.route('/map/clear/<mapid>/<pwd>/<int:pt1>/<int:pt2>/<user>/<sess>')
def clearmap(mapid,pwd,pt1,pt2,user,sess):
return modifymap(mapid,pwd,user,sess,lambda ptlist: (ptlist[:pt1]+ptlist[pt2:],pt1==0))
def removepoints(ptlist,ptidxtodel):
    """Return (ptlist without the given indices, whether point 0 was removed).

    The old implementation built range(len(ptlist)) and called l.remove(i)
    once per index -- O(n*m); a set membership test makes this O(n).
    Out-of-range indices are now ignored instead of raising ValueError.
    """
    Log('removepoints: %s %s'%(ptidxtodel,len(ptlist)))
    todel = set(ptidxtodel)
    kept = [pt for i, pt in enumerate(ptlist) if i not in todel]
    return (kept, 0 in ptidxtodel)
@application.route('/map/clearlist/<mapid>/<pwd>/<ptliststr>',defaults={'user':None,'sess':None})
@application.route('/map/clearlist/<mapid>/<pwd>/<ptliststr>/<user>/<sess>')
def clearmaplist(mapid,pwd,ptliststr,user,sess):
ptidxtodel = map(int,ptliststr.split(','))
return modifymap(mapid,pwd,user,sess,lambda ptlist: removepoints(ptlist,ptidxtodel))
@application.route('/map/export/<mapid>')
def exportmap(mapid):
# TODO: build it from client side
pass
@application.route('/map/demize/<int:index>/<mapid>/<pwd>',defaults={'user':None,'sess':None})
@application.route('/map/demize/<int:index>/<mapid>/<pwd>/<user>/<sess>')
def demize(index,mapid,pwd,user,sess):
try:
# Authentificate
auth(mapid,pwd,user,sess)
# Start/continue/finish DEMization. index is current point index, l is total number of points in map
index,l = Demize(index,mapid)
# Format answer
if index==0:
answer = '<answer><result>Done</result></answer>'
else:
percent = index * 100 / l
answer = '<answer><result>OK</result><nextindex>%s</nextindex><percent>%s</percent></answer>' % (index,percent)
except Exception, e:
answer = '<answer><result>%s</result></answer>' % e
return Response('<?xml version="1.0" encoding="UTF-8"?>\n%s'%answer,mimetype='text/xml')
## User services
def CheckHumain(humaincheck):
    """Anti-bot check: accept 'earth' or 'the earth' (localized), any case."""
    answer = humaincheck.strip().lower()
    return answer in (gettext('earth'), gettext('the earth'))
@application.route('/<lang>/registerform')
@application.route('/registerform',defaults={'lang':None})
def registerform(lang):
    """Render the account-registration form."""
    if lang is not None:
        session['lang'] = lang
    return render_template('register.html')
@application.route('/register', methods=['POST'])
def register():
mail = request.form['mail'].lower()
user = request.form['user'].lower()
pwd1 = request.form['pwd1']
pwd2 = request.form['pwd2']
humaincheck = request.form['humaincheck']
if not CheckHumain(humaincheck):
return render_template('register.html',error_message=gettext('Humain check error'))
if pwd1!=pwd2:
return render_template('register.html',error_message=gettext('The two password you entered are different. Please enter twice the same password'))
activation_id,err_msg = ReserveUser(user.encode('ascii'),mail.encode('ascii'),pwd1.encode('utf8'))
if activation_id==None:
return render_template('register.html',error_message=err_msg)
SendActivationMail(mail,user,activation_id)
return render_template('user_registered.html',user=user)
@application.route('/activate/<user>/<activationid>')
def activate(user,activationid):
""" Activate user given it's activation_id """
try:
ActivateUser(user,activationid)
except Exception, e:
return render_template('user_activate_error.html',message=str(e))
return render_template('user_activated.html',user=user)
@application.route('/login/<user>/<pwd>')
def login(user,pwd):
""" Check login/password return sesssion_id """
user = user.lower()
try:
(user,sessid) = Login(user,pwd)
except Exception, e:
return Response('<result><user>NoUser</user><sess>-1</sess><error>%s</error></result>'%e, mimetype='text/xml')
out = '<result>'
if user==None:
user = 'NoUser'
sess = -1
out = '<result><user>%s</user><sess>%s</sess></result>' % (user,sessid)
return Response(out, mimetype='text/xml')
@application.route('/chksess/<user>/<sess>')
def chksess(user,sess):
""" Check session_id for a given user """
try:
ret = CheckSession(user,sess)
except Exception, e:
out = '<answer><result>Error: %s</result><user>NoUser</user><sess>-1</sess></answer>' % str(e)
return Response(out, mimetype='text/xml')
if ret:
result = 'OK'
else:
result = 'Expired'
out = '<answer><result>%s</result><user>%s</user><sess>%s</sess></answer>' % (result,user,sess)
return Response(out, mimetype='text/xml')
@application.route('/<lang>/forgotpwd')
@application.route('/forgotpwd',defaults={'lang':None})
def forgotpwd(lang):
    """Render the password-recovery form."""
    if lang is not None:
        session['lang'] = lang
    return render_template('forgotpwd.html')
@application.route('/resendpwd', methods=['POST'])
def resendpwd():
user_mail = request.form['user_mail'].encode('ascii').lower()
humaincheck = request.form['humaincheck']
if not CheckHumain(humaincheck):
return render_template('resendpwd_error.html',error_message=gettext('Humain check error'))
user,err_str = GetUserFromUserOrEmail(user_mail)
if user==None:
return render_template('resendpwd_error.html',error_message=err_str)
mail = SendForgotPasswordMail(user)
return render_template('resendpwd_ok.html',mail=mail)
def retrievemap(mapid):
    """Collect the summary fields used by the user-home map list."""
    (lat, lon) = DbGet(mapid,'startpoint').split(',')
    return {
        'mapid': mapid,
        'lat': lat,
        'lon': lon,
        'desc': DbGet(mapid,'trackdesc').decode('utf8'),
        'date': DbGet(mapid,'date'),
        'user': DbGet(mapid,'trackuser'),
    }
@application.route('/<lang>/userhome/<user>')
@application.route('/userhome/<user>',defaults={'lang':None})
def userhome(lang,user):
if lang!=None:
session['lang']=lang
mapids = DbGetMapsOfUser(user.encode('ascii'))
return render_template('userhome.html',user=user,maps=map(retrievemap,mapids),GMapsApiKey=keysnpwds['GMapsApiKey'])
@application.route('/mergemaps/<mapidsliststr>/<user>/<sess>')
def mergemaps(mapidsliststr,user,sess):
if not CheckSession(user,sess):
message = gettext('Cannot identify user %s %s')%(user,sess)
else:
mapids = mapidsliststr.split(',')
ptlistmerged = {}
for mapid in mapids:
newmapid = uniqid()
Log("MergeCgi: parse map %s" % mapid,newmapid)
# Parse map
options,ptlist = ParseMap(mapid)
#TODO: merge options
# set right day if needed
if ptlist[0].datetime.year<=1980:
dfromdb = DbGet(mapid,'date')
if dfromdb:
d = datetime.datetime.strptime(dfromdb,'%Y-%m-%d')
for pt in ptlist:
pt.datetime = pt.datetime.replace(year=d.year,month=d.month,day=d.day)
# append to dict
for pt in ptlist:
ptlistmerged[pt.datetime] = pt
ptlistmerged = ptlistmerged.values()
ptlistmerged.sort(key=lambda pt:pt.datetime)
Log("MergeCgi: rebuild: Track len=%d" % len(ptlistmerged),newmapid)
# Rebuild map
track = Track(ptlistmerged)
pwd = BuildMapFromTrack(track,newmapid,newmapid,'Result of merge',user,options)
Log("MergeCgi: finished",newmapid)
# Redirect to map
return redirect('/showmap/%s'%newmapid)
@application.route('/delmaps/<mapidsliststr>/<user>/<sess>')
def delmaps(mapidsliststr,user,sess):
if not CheckSession(user,sess):
message = gettext('Cannot identify user %s %s')%(user,sess)
else:
try:
mapids = mapidsliststr.split(',')
message = ''
for mapid in mapids:
map_user = DbGet(mapid,'trackuser')
if len(map_user)>0 and map_user==user:
DbDelMap(mapid)
os.remove('data/mapdata/%s.json.gz'%mapid)
message += gettext('Map %s deleted. ')%mapid
else:
message += gettext('Map %s do not belong to you')%mapid
break
except Exception, e:
message += gettext('Error: %s')%e
return render_template('map_deleted.html',message=message)
## Prepare
@application.route('/<lang>/prepare',defaults={'map_type':'GeoPortal','pts':[],'names':[]})
@application.route('/<lang>/prepare/<map_type>',defaults={'pts':[],'names':[]})
@application.route('/<lang>/prepare/<map_type>/<pts>',defaults={'names':None})
@application.route('/<lang>/prepare/<map_type>/<pts>/<names>')<|fim▁hole|>@application.route('/prepare',defaults={'lang':None,'map_type':'GeoPortal','pts':[],'names':[]})
@application.route('/prepare/<map_type>',defaults={'lang':None,'pts':[],'names':[]})
@application.route('/prepare/<map_type>/<pts>',defaults={'lang':None,'names':None})
@application.route('/prepare/<map_type>/<pts>/<names>',defaults={'lang':None})
def prepare(lang,map_type,pts,names):
if lang!=None:
session['lang']=lang
return render_template('prepare.html',domain=config['domain'],map_type=map_type,GMapsApiKey=keysnpwds['GMapsApiKey'],GeoPortalApiKey=keysnpwds['GeoPortalApiKey'])
# Backward compatibility
@application.route('/prepare.php?ptlist=<ptlist>',defaults={'lang':None})
#@application.route('/fr/prepare.php',defaults={'lang':'fr'})
def prepare_php(lang):
pts=request.args.get('ptlist')
maptype=request.args.get('maptype')
names=request.args.get('names')
return prepare(lang,maptype,pts,names)
@application.route('/ele/<float:lat>/<float:lon>')
def getele(lat,lon):
    """Return the DEM elevation (whole meters) at the given coordinate."""
    elevation = GetEleFromLatLon(lat, lon)
    return Response('%d' % elevation, mimetype='text/plain')
def PtStr2FloatArray(ptstr):
    """Parse a 'lat,lon' string into a (float, float) tuple."""
    parts = ptstr.split(',')
    return (float(parts[0]), float(parts[1]))
@application.route('/profile/<ptliststr>/<width>/<height>')
def profile(ptliststr,width,height):
    """Return a plain-text elevation profile for a '~'-separated point list."""
    ptlist = [PtStr2FloatArray(s) for s in ptliststr.split('~')]
    if len(ptlist) < 2:
        return Response(gettext('Error: Cannot compute profile for only one point'), mimetype='text/plain')
    # profile is resampled to a fixed number of points
    nbpts = 400
    body = '\n'.join(str(v) for v in ComputeProfile(ptlist,nbpts,width,height))
    return Response(body, mimetype='text/plain')
@application.route('/prepare/export/<format>/<ptlist>/<names>')
def prepare_export(format,ptlist,names):
# TODO: build it from client side
pass
## Misc
@application.route('/<lang>/mobile')
@application.route('/mobile',defaults={'lang':None})
def mobile(lang):
    """Mobile landing page."""
    if lang is not None:
        session['lang'] = lang
    return render_template('mobile.html')
@application.route('/<lang>/tour')
@application.route('/tour',defaults={'lang':None})
def tour(lang):
    """Guided-tour page."""
    if lang is not None:
        session['lang'] = lang
    return render_template('tour.html')
## Add .min.js in all templates if debug mode is true
@application.context_processor
def inject_min_js():
    """Expose 'minify' to templates: '' in debug mode, '.min' in production."""
    suffix = '' if application.debug else '.min'
    return {'minify': suffix}
## Program entry point
if __name__ == '__main__':
# Start web server
if len(sys.argv)==2:
if sys.argv[1] in ('-h','--help'):
print 'Usage: %s [bindingip]' % sys.argv[0]
exit()
else:
host = sys.argv[1]
else:
host = "127.0.0.1"
application.run(port=8080,debug=True,host=host)<|fim▁end|>
| |
<|file_name|>cldrtree_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cldrtree
import (
"bytes"
"flag"
"io/ioutil"
"math/rand"
"path/filepath"
"reflect"
"regexp"
"strings"
"testing"
"golang.org/x/text/internal/gen"
"golang.org/x/text/language"
"golang.org/x/text/unicode/cldr"
)
var genOutput = flag.Bool("gen", false, "generate output files")
func TestAliasRegexp(t *testing.T) {
	// Submatches expected from aliasRe for various alias strings; a nil
	// want means the expression must not match at all.
	testCases := []struct {
		alias string
		want  []string
	}{{
		alias: "miscPatterns[@numberSystem='latn']",
		want: []string{
			"miscPatterns[@numberSystem='latn']",
			"miscPatterns",
			"[@numberSystem='latn']",
			"numberSystem",
			"latn",
		},
	}, {
		alias: `calendar[@type='greg-foo']/days/`,
		want: []string{
			"calendar[@type='greg-foo']",
			"calendar",
			"[@type='greg-foo']",
			"type",
			"greg-foo",
		},
	}, {
		alias: "eraAbbr",
		want: []string{
			"eraAbbr",
			"eraAbbr",
			"",
			"",
			"",
		},
	}, {
		// match must be anchored at beginning.
		alias: `../calendar[@type='gregorian']/days/`,
	}}
	for _, tc := range testCases {
		t.Run(tc.alias, func(t *testing.T) {
			if got := aliasRe.FindStringSubmatch(tc.alias); !reflect.DeepEqual(got, tc.want) {
				t.Errorf("got %v; want %v", got, tc.want)
			}
		})
	}
}
func TestBuild(t *testing.T) {
tree1, _ := loadTestdata(t, "test1")
tree2, _ := loadTestdata(t, "test2")
// Constants for second test test
const (
calendar = iota
field
)
const (
month = iota
era
filler
cyclicNameSet
)
const (
abbreviated = iota
narrow
wide
)
testCases := []struct {
desc string
tree *Tree
locale string
path []uint16
isFeature bool
result string
}{{
desc: "und/chinese month format wide m1",
tree: tree1,
locale: "und",
path: path(calendar, 0, month, 0, wide, 0),
result: "cM01",
}, {
desc: "und/chinese month format wide m12",
tree: tree1,
locale: "und",<|fim▁hole|> }, {
desc: "und/non-existing value",
tree: tree1,
locale: "und",
path: path(calendar, 0, month, 0, wide, 13),
result: "",
}, {
desc: "und/dangi:chinese month format wide",
tree: tree1,
locale: "und",
path: path(calendar, 1, month, 0, wide, 0),
result: "cM01",
}, {
desc: "und/chinese month format abbreviated:wide",
tree: tree1,
locale: "und",
path: path(calendar, 0, month, 0, abbreviated, 0),
result: "cM01",
}, {
desc: "und/chinese month format narrow:wide",
tree: tree1,
locale: "und",
path: path(calendar, 0, month, 0, narrow, 0),
result: "cM01",
}, {
desc: "und/gregorian month format wide",
tree: tree1,
locale: "und",
path: path(calendar, 2, month, 0, wide, 1),
result: "gM02",
}, {
desc: "und/gregorian month format:stand-alone narrow",
tree: tree1,
locale: "und",
path: path(calendar, 2, month, 0, narrow, 0),
result: "1",
}, {
desc: "und/gregorian month stand-alone:format abbreviated",
tree: tree1,
locale: "und",
path: path(calendar, 2, month, 1, abbreviated, 0),
result: "gM01",
}, {
desc: "und/gregorian month stand-alone:format wide ",
tree: tree1,
locale: "und",
path: path(calendar, 2, month, 1, abbreviated, 0),
result: "gM01",
}, {
desc: "und/dangi:chinese format narrow:wide ",
tree: tree1,
locale: "und",
path: path(calendar, 1, month, 0, narrow, 3),
result: "cM04",
}, {
desc: "und/field era displayname 0",
tree: tree2,
locale: "und",
path: path(field, 0, 0, 0),
result: "Era",
}, {
desc: "en/field era displayname 0",
tree: tree2,
locale: "en",
path: path(field, 0, 0, 0),
result: "era",
}, {
desc: "und/calendar hebrew format wide 7-leap",
tree: tree2,
locale: "und",
path: path(calendar, 7, month, 0, wide, 13),
result: "Adar II",
}, {
desc: "en-GB:en-001:en:und/calendar hebrew format wide 7-leap",
tree: tree2,
locale: "en-GB",
path: path(calendar, 7, month, 0, wide, 13),
result: "Adar II",
}, {
desc: "und/buddhist month format wide 11",
tree: tree2,
locale: "und",
path: path(calendar, 0, month, 0, wide, 11),
result: "genWideM12",
}, {
desc: "en-GB/gregorian month stand-alone narrow 2",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, month, 1, narrow, 2),
result: "gbNarrowM3",
}, {
desc: "en-GB/gregorian month format narrow 3/missing in en-GB",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, month, 0, narrow, 3),
result: "enNarrowM4",
}, {
desc: "en-GB/gregorian month format narrow 3/missing in en and en-GB",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, month, 0, narrow, 6),
result: "gregNarrowM7",
}, {
desc: "en-GB/gregorian month format narrow 3/missing in en and en-GB",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, month, 0, narrow, 6),
result: "gregNarrowM7",
}, {
desc: "en-GB/gregorian era narrow",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, era, abbreviated, 0, 1),
isFeature: true,
result: "AD",
}, {
desc: "en-GB/gregorian era narrow",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, era, narrow, 0, 0),
isFeature: true,
result: "BC",
}, {
desc: "en-GB/gregorian era narrow",
tree: tree2,
locale: "en-GB",
path: path(calendar, 6, era, wide, 1, 0),
isFeature: true,
result: "Before Common Era",
}, {
desc: "en-GB/dangi:chinese cyclicName, months, format, narrow:abbreviated 2",
tree: tree2,
locale: "en-GB",
path: path(calendar, 1, cyclicNameSet, 3, 0, 1, 1),
isFeature: true,
result: "year2",
}, {
desc: "en-GB/field era-narrow ",
tree: tree2,
locale: "en-GB",
path: path(field, 2, 0, 0),
result: "era",
}, {
desc: "en-GB/field month-narrow relativeTime future one",
tree: tree2,
locale: "en-GB",
path: path(field, 5, 2, 0, 1),
isFeature: true,
result: "001NarrowFutMOne",
}, {
// Don't fall back to the one of "en".
desc: "en-GB/field month-short relativeTime past one:other",
tree: tree2,
locale: "en-GB",
path: path(field, 4, 2, 1, 1),
isFeature: true,
result: "001ShortPastMOther",
}, {
desc: "en-GB/field month relativeTime future two:other",
tree: tree2,
locale: "en-GB",
path: path(field, 3, 2, 0, 2),
isFeature: true,
result: "enFutMOther",
}}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
tag, _ := language.CompactIndex(language.MustParse(tc.locale))
s := tc.tree.lookup(tag, tc.isFeature, tc.path...)
if s != tc.result {
t.Errorf("got %q; want %q", s, tc.result)
}
})
}
}
func path(e ...uint16) []uint16 { return e }
func TestGen(t *testing.T) {
testCases := []string{"test1", "test2"}
for _, tc := range testCases {
t.Run(tc, func(t *testing.T) {
_, got := loadTestdata(t, tc)
// Remove sizes that may vary per architecture.
re := regexp.MustCompile("// Size: [0-9]*")
got = re.ReplaceAllLiteral(got, []byte("// Size: xxxx"))
re = regexp.MustCompile("// Total table size [0-9]*")
got = re.ReplaceAllLiteral(got, []byte("// Total table size: xxxx"))
file := filepath.Join("testdata", tc, "output.go")
if *genOutput {
ioutil.WriteFile(file, got, 0700)
t.SkipNow()
}
b, err := ioutil.ReadFile(file)
if err != nil {
t.Fatalf("failed to open file: %v", err)
}
if want := string(b); string(got) != want {
t.Log(string(got))
t.Errorf("files differ")
}
})
}
}
func loadTestdata(t *testing.T, test string) (tree *Tree, file []byte) {
b := New("test")
var d cldr.Decoder
data, err := d.DecodePath(filepath.Join("testdata", test))
if err != nil {
t.Fatalf("error decoding testdata: %v", err)
}
context := Enum("context")
widthMap := func(s string) string {
// Align era with width values.
if r, ok := map[string]string{
"eraAbbr": "abbreviated",
"eraNarrow": "narrow",
"eraNames": "wide",
}[s]; ok {
s = r
}
return "w" + strings.Title(s)
}
width := EnumFunc("width", widthMap, "abbreviated", "narrow", "wide")
r := rand.New(rand.NewSource(0))
for _, loc := range data.Locales() {
ldml := data.RawLDML(loc)
x := b.Locale(language.Make(loc))
if x := x.Index(ldml.Dates.Calendars); x != nil {
for _, cal := range ldml.Dates.Calendars.Calendar {
x := x.IndexFromType(cal)
if x := x.Index(cal.Months); x != nil {
for _, mc := range cal.Months.MonthContext {
x := x.IndexFromType(mc, context)
for _, mw := range mc.MonthWidth {
x := x.IndexFromType(mw, width)
for _, m := range mw.Month {
x.SetValue(m.Type+m.Yeartype, m)
}
}
}
}
if x := x.Index(cal.CyclicNameSets); x != nil {
for _, cns := range cal.CyclicNameSets.CyclicNameSet {
x := x.IndexFromType(cns)
for _, cc := range cns.CyclicNameContext {
x := x.IndexFromType(cc, context)
for _, cw := range cc.CyclicNameWidth {
x := x.IndexFromType(cw, width)
for _, c := range cw.CyclicName {
x.SetValue(c.Type, c)
}
}
}
}
}
if x := x.Index(cal.Eras); x != nil {
opts := []Option{width, SharedType()}
if x := x.Index(cal.Eras.EraNames, opts...); x != nil {
for _, e := range cal.Eras.EraNames.Era {
x.IndexFromAlt(e).SetValue(e.Type, e)
}
}
if x := x.Index(cal.Eras.EraAbbr, opts...); x != nil {
for _, e := range cal.Eras.EraAbbr.Era {
x.IndexFromAlt(e).SetValue(e.Type, e)
}
}
if x := x.Index(cal.Eras.EraNarrow, opts...); x != nil {
for _, e := range cal.Eras.EraNarrow.Era {
x.IndexFromAlt(e).SetValue(e.Type, e)
}
}
}
{
// Ensure having more than 2 buckets.
f := x.IndexWithName("filler")
b := make([]byte, maxStrlen)
opt := &options{parent: x}
r.Read(b)
f.setValue("0", string(b), opt)
}
}
}
if x := x.Index(ldml.Dates.Fields); x != nil {
for _, f := range ldml.Dates.Fields.Field {
x := x.IndexFromType(f)
for _, d := range f.DisplayName {
x.Index(d).SetValue("", d)
}
for _, r := range f.Relative {
x.Index(r).SetValue(r.Type, r)
}
for _, rt := range f.RelativeTime {
x := x.Index(rt).IndexFromType(rt)
for _, p := range rt.RelativeTimePattern {
x.SetValue(p.Count, p)
}
}
for _, rp := range f.RelativePeriod {
x.Index(rp).SetValue("", rp)
}
}
}
}
tree, err = build(b)
if err != nil {
t.Fatal("error building tree:", err)
}
w := gen.NewCodeWriter()
generate(b, tree, w)
buf := &bytes.Buffer{}
if _, err = w.WriteGo(buf, "test"); err != nil {
t.Fatal("error generating code:", err)
}
return tree, buf.Bytes()
}<|fim▁end|>
|
path: path(calendar, 0, month, 0, wide, 11),
result: "cM12",
|
<|file_name|>boolean_single.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
var argv = require('yargs/yargs')(process.argv.slice(2))
.boolean(['r','v'])<|fim▁hole|>console.dir(argv._);<|fim▁end|>
|
.argv
;
console.dir([ argv.r, argv.v ]);
|
<|file_name|>AdobeReaderUpdatesURLProvider.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2014: wycomco GmbH ([email protected])
# 2015: modifications by Tim Sutton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""See docstring for AdobeReaderURLProvider class"""
# Disabling warnings for env members and imports that only affect recipe-
# specific processors.
#pylint: disable=e1101
import urllib2
import plistlib
from autopkglib import Processor, ProcessorError
__all__ = ["AdobeReaderUpdatesURLProvider"]
MAJOR_VERSION_DEFAULT = "11"
CHECK_OS_VERSION_DEFAULT = "10.8"
MAJOR_VERSION_MATCH_STR = "adobe/reader/mac/%s"
AR_UPDATER_DOWNLOAD_URL = (
"http://download.adobe.com/"
"pub/adobe/reader/mac/%s.x/%s/misc/AdbeRdrUpd%s.dmg")
AR_UPDATER_DOWNLOAD_URL2 = "http://ardownload.adobe.com"
AR_UPDATER_BASE_URL = "https://armmf.adobe.com/arm-manifests/mac"
AR_URL_TEMPLATE = "/%s/current_version_url_template.txt"
AR_MANIFEST_TEMPLATE = "/%s/manifest_url_template.txt"
AR_MAJREV_IDENTIFIER = "{MAJREV}"
OSX_MAJREV_IDENTIFIER = "{OS_VER_MAJ}"
OSX_MINREV_IDENTIFIER = "{OS_VER_MIN}"
AR_PROD_IDENTIFIER = '{PROD}'<|fim▁hole|>AR_PROD_ARCH_IDENTIFIER = '{PROD_ARCH}'
AR_PROD = 'com_adobe_Reader'
AR_PROD_ARCH = 'univ'
class AdobeReaderUpdatesURLProvider(Processor):
"""Provides URL to the latest Adobe Reader release."""
description = __doc__
input_variables = {
"major_version": {
"required": False,
"description": ("Major version. Examples: '10', '11'. Defaults to "
"%s" % MAJOR_VERSION_DEFAULT)
},
"os_version": {
"required": False,
"default": CHECK_OS_VERSION_DEFAULT,
"description": ("Version of OS X to check. Default: %s" %
CHECK_OS_VERSION_DEFAULT)
}
}
output_variables = {
"url": {
"description": "URL to the latest Adobe Reader release.",
},
"version": {
"description": "Version for this update.",
},
}
def get_reader_updater_pkg_url(self, major_version):
'''Returns download URL for Adobe Reader Updater DMG'''
request = urllib2.Request(
AR_UPDATER_BASE_URL + AR_MANIFEST_TEMPLATE % major_version)
try:
url_handle = urllib2.urlopen(request)
version_string = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't open manifest template: %s" % (err))
os_maj, os_min = self.env["os_version"].split(".")
version_string = version_string.replace(
AR_MAJREV_IDENTIFIER, major_version)
version_string = version_string.replace(OSX_MAJREV_IDENTIFIER, os_maj)
version_string = version_string.replace(OSX_MINREV_IDENTIFIER, os_min)
version_string = version_string.replace(AR_PROD_IDENTIFIER, AR_PROD)
version_string = version_string.replace(AR_PROD_ARCH_IDENTIFIER, AR_PROD_ARCH)
request = urllib2.Request(
AR_UPDATER_BASE_URL + version_string)
try:
url_handle = urllib2.urlopen(request)
plist = plistlib.readPlistFromString(url_handle.read())
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't get or read manifest: %s" % (err))
url = AR_UPDATER_DOWNLOAD_URL2 + plist['PatchURL']
return url
def get_reader_updater_dmg_url(self, major_version):
'''Returns download URL for Adobe Reader Updater DMG'''
request = urllib2.Request(
AR_UPDATER_BASE_URL + AR_URL_TEMPLATE % major_version)
try:
url_handle = urllib2.urlopen(request)
version_string = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't open URL template: %s" % (err))
os_maj, os_min = self.env["os_version"].split(".")
version_string = version_string.replace(
AR_MAJREV_IDENTIFIER, major_version)
version_string = version_string.replace(OSX_MAJREV_IDENTIFIER, os_maj)
version_string = version_string.replace(OSX_MINREV_IDENTIFIER, os_min)
request = urllib2.Request(
AR_UPDATER_BASE_URL + version_string)
try:
url_handle = urllib2.urlopen(request)
version = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't get version string: %s" % (err))
versioncode = version.replace('.', '')
url = AR_UPDATER_DOWNLOAD_URL % (major_version, version, versioncode)
return (url, version)
def main(self):
major_version = self.env.get("major_version", MAJOR_VERSION_DEFAULT)
(url, version) = self.get_reader_updater_dmg_url(major_version)
# only need the version, getting the URL from the manifest now
url = self.get_reader_updater_pkg_url(major_version)
self.env["url"] = url
self.env["version"] = version
self.output("Found URL %s" % self.env["url"])
if __name__ == "__main__":
PROCESSOR = AdobeReaderUpdatesURLProvider()
PROCESSOR.execute_shell()<|fim▁end|>
| |
<|file_name|>send-iloop.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32
extern mod extra;
<|fim▁hole|>use std::task;
use std::uint;
fn die() {
fail!();
}
fn iloop() {
task::spawn(|| die() );
let (p, c) = comm::stream::<()>();
loop {
// Sending and receiving here because these actions yield,
// at which point our child can kill us.
c.send(());
p.recv();
// The above comment no longer makes sense but I'm
// reluctant to remove a linked failure test case.
task::yield();
}
}
pub fn main() {
for uint::range(0u, 16u) |_i| {
task::spawn_unlinked(|| iloop() );
}
}<|fim▁end|>
|
use std::comm;
|
<|file_name|>SoundTouchEffect.cpp<|end_file_name|><|fim▁begin|>/**********************************************************************
Audacity: A Digital Audio Editor
SoundTouchEffect.cpp
Dominic Mazzoni, Vaughan Johnson
This abstract class contains all of the common code for an
effect that uses SoundTouch to do its processing (ChangeTempo
and ChangePitch).
**********************************************************************/
#include "../Audacity.h"
#if USE_SOUNDTOUCH
#include <math.h>
<|fim▁hole|>#include "../LabelTrack.h"
#include "../WaveTrack.h"
#include "../Project.h"
#include "SoundTouchEffect.h"
#include "TimeWarper.h"
#include "../NoteTrack.h"
bool EffectSoundTouch::ProcessLabelTrack(Track *track)
{
// SetTimeWarper(new RegionTimeWarper(mCurT0, mCurT1,
// new LinearTimeWarper(mCurT0, mCurT0,
// mCurT1, mCurT0 + (mCurT1-mCurT0)*mFactor)));
LabelTrack *lt = (LabelTrack*)track;
if (lt == NULL) return false;
lt->WarpLabels(*GetTimeWarper());
return true;
}
#ifdef USE_MIDI
bool EffectSoundTouch::ProcessNoteTrack(Track *track)
{
NoteTrack *nt = (NoteTrack *) track;
if (nt == NULL) return false;
nt->WarpAndTransposeNotes(mCurT0, mCurT1, *GetTimeWarper(), mSemitones);
return true;
}
#endif
bool EffectSoundTouch::Process()
{
// Assumes that mSoundTouch has already been initialized
// by the subclass for subclass-specific parameters. The
// time warper should also be set.
// Check if this effect will alter the selection length; if so, we need
// to operate on sync-lock selected tracks.
bool mustSync = true;
if (mT1 == GetTimeWarper()->Warp(mT1)) {
mustSync = false;
}
//Iterate over each track
// Needs Track::All for sync-lock grouping.
this->CopyInputTracks(Track::All);
bool bGoodResult = true;
TrackListIterator iter(mOutputTracks);
Track* t;
mCurTrackNum = 0;
m_maxNewLength = 0.0;
t = iter.First();
while (t != NULL) {
if (t->GetKind() == Track::Label &&
(t->GetSelected() || (mustSync && t->IsSyncLockSelected())) )
{
if (!ProcessLabelTrack(t))
{
bGoodResult = false;
break;
}
}
#ifdef USE_MIDI
else if (t->GetKind() == Track::Note &&
(t->GetSelected() || (mustSync && t->IsSyncLockSelected())))
{
if (!ProcessNoteTrack(t))
{
bGoodResult = false;
break;
}
}
#endif
else if (t->GetKind() == Track::Wave && t->GetSelected())
{
WaveTrack* leftTrack = (WaveTrack*)t;
//Get start and end times from track
mCurT0 = leftTrack->GetStartTime();
mCurT1 = leftTrack->GetEndTime();
//Set the current bounds to whichever left marker is
//greater and whichever right marker is less
mCurT0 = wxMax(mT0, mCurT0);
mCurT1 = wxMin(mT1, mCurT1);
// Process only if the right marker is to the right of the left marker
if (mCurT1 > mCurT0) {
sampleCount start, end;
if (leftTrack->GetLinked()) {
double t;
WaveTrack* rightTrack = (WaveTrack*)(iter.Next());
//Adjust bounds by the right tracks markers
t = rightTrack->GetStartTime();
t = wxMax(mT0, t);
mCurT0 = wxMin(mCurT0, t);
t = rightTrack->GetEndTime();
t = wxMin(mT1, t);
mCurT1 = wxMax(mCurT1, t);
//Transform the marker timepoints to samples
start = leftTrack->TimeToLongSamples(mCurT0);
end = leftTrack->TimeToLongSamples(mCurT1);
//Inform soundtouch there's 2 channels
mSoundTouch->setChannels(2);
//ProcessStereo() (implemented below) processes a stereo track
if (!ProcessStereo(leftTrack, rightTrack, start, end))
{
bGoodResult = false;
break;
}
mCurTrackNum++; // Increment for rightTrack, too.
} else {
//Transform the marker timepoints to samples
start = leftTrack->TimeToLongSamples(mCurT0);
end = leftTrack->TimeToLongSamples(mCurT1);
//Inform soundtouch there's a single channel
mSoundTouch->setChannels(1);
//ProcessOne() (implemented below) processes a single track
if (!ProcessOne(leftTrack, start, end))
{
bGoodResult = false;
break;
}
}
}
mCurTrackNum++;
}
else if (mustSync && t->IsSyncLockSelected()) {
t->SyncLockAdjust(mT1, GetTimeWarper()->Warp(mT1));
}
//Iterate to the next track
t = iter.Next();
}
if (bGoodResult)
ReplaceProcessedTracks(bGoodResult);
delete mSoundTouch;
mSoundTouch = NULL;
// mT0 = mCurT0;
// mT1 = mCurT0 + m_maxNewLength; // Update selection.
return bGoodResult;
}
//ProcessOne() takes a track, transforms it to bunch of buffer-blocks,
//and executes ProcessSoundTouch on these blocks
bool EffectSoundTouch::ProcessOne(WaveTrack *track,
sampleCount start, sampleCount end)
{
WaveTrack *outputTrack;
sampleCount s;
mSoundTouch->setSampleRate((unsigned int)(track->GetRate()+0.5));
outputTrack = mFactory->NewWaveTrack(track->GetSampleFormat(), track->GetRate());
//Get the length of the buffer (as double). len is
//used simple to calculate a progress meter, so it is easier
//to make it a double now than it is to do it later
double len = (double)(end - start);
//Initiate a processing buffer. This buffer will (most likely)
//be shorter than the length of the track being processed.
float *buffer = new float[track->GetMaxBlockSize()];
//Go through the track one buffer at a time. s counts which
//sample the current buffer starts at.
s = start;
while (s < end) {
//Get a block of samples (smaller than the size of the buffer)
sampleCount block = track->GetBestBlockSize(s);
//Adjust the block size if it is the final block in the track
if (s + block > end)
block = end - s;
//Get the samples from the track and put them in the buffer
track->Get((samplePtr) buffer, floatSample, s, block);
//Add samples to SoundTouch
mSoundTouch->putSamples(buffer, block);
//Get back samples from SoundTouch
unsigned int outputCount = mSoundTouch->numSamples();
if (outputCount > 0) {
float *buffer2 = new float[outputCount];
mSoundTouch->receiveSamples(buffer2, outputCount);
outputTrack->Append((samplePtr)buffer2, floatSample, outputCount);
delete[] buffer2;
}
//Increment s one blockfull of samples
s += block;
//Update the Progress meter
if (TrackProgress(mCurTrackNum, (s - start) / len))
return false;
}
// Tell SoundTouch to finish processing any remaining samples
mSoundTouch->flush(); // this should only be used for changeTempo - it dumps data otherwise with pRateTransposer->clear();
unsigned int outputCount = mSoundTouch->numSamples();
if (outputCount > 0) {
float *buffer2 = new float[outputCount];
mSoundTouch->receiveSamples(buffer2, outputCount);
outputTrack->Append((samplePtr)buffer2, floatSample, outputCount);
delete[] buffer2;
}
// Flush the output WaveTrack (since it's buffered, too)
outputTrack->Flush();
// Clean up the buffer
delete[]buffer;
// Take the output track and insert it in place of the original
// sample data
track->ClearAndPaste(mCurT0, mCurT1, outputTrack, true, false, GetTimeWarper());
double newLength = outputTrack->GetEndTime();
m_maxNewLength = wxMax(m_maxNewLength, newLength);
// Delete the outputTrack now that its data is inserted in place
delete outputTrack;
//Return true because the effect processing succeeded.
return true;
}
bool EffectSoundTouch::ProcessStereo(WaveTrack* leftTrack, WaveTrack* rightTrack,
sampleCount start, sampleCount end)
{
mSoundTouch->setSampleRate((unsigned int)(leftTrack->GetRate()+0.5));
WaveTrack* outputLeftTrack = mFactory->NewWaveTrack(leftTrack->GetSampleFormat(),
leftTrack->GetRate());
WaveTrack* outputRightTrack = mFactory->NewWaveTrack(rightTrack->GetSampleFormat(),
rightTrack->GetRate());
//Get the length of the buffer (as double). len is
//used simple to calculate a progress meter, so it is easier
//to make it a double now than it is to do it later
double len = (double)(end - start);
//Initiate a processing buffer. This buffer will (most likely)
//be shorter than the length of the track being processed.
// Make soundTouchBuffer twice as big as MaxBlockSize for each channel,
// because Soundtouch wants them interleaved, i.e., each
// Soundtouch sample is left-right pair.
sampleCount maxBlockSize = leftTrack->GetMaxBlockSize();
float* leftBuffer = new float[maxBlockSize];
float* rightBuffer = new float[maxBlockSize];
float* soundTouchBuffer = new float[maxBlockSize * 2];
// Go through the track one stereo buffer at a time.
// sourceSampleCount counts the sample at which the current buffer starts,
// per channel.
sampleCount sourceSampleCount = start;
while (sourceSampleCount < end) {
//Get a block of samples (smaller than the size of the buffer)
sampleCount blockSize = leftTrack->GetBestBlockSize(sourceSampleCount);
//Adjust the block size if it is the final block in the track
if (sourceSampleCount + blockSize > end)
blockSize = end - sourceSampleCount;
// Get the samples from the tracks and put them in the buffers.
leftTrack->Get((samplePtr)(leftBuffer), floatSample, sourceSampleCount, blockSize);
rightTrack->Get((samplePtr)(rightBuffer), floatSample, sourceSampleCount, blockSize);
// Interleave into soundTouchBuffer.
for (int index = 0; index < blockSize; index++) {
soundTouchBuffer[index*2] = leftBuffer[index];
soundTouchBuffer[(index*2)+1] = rightBuffer[index];
}
//Add samples to SoundTouch
mSoundTouch->putSamples(soundTouchBuffer, blockSize);
//Get back samples from SoundTouch
unsigned int outputCount = mSoundTouch->numSamples();
if (outputCount > 0)
this->ProcessStereoResults(outputCount, outputLeftTrack, outputRightTrack);
//Increment sourceSampleCount one blockfull of samples
sourceSampleCount += blockSize;
//Update the Progress meter
// mCurTrackNum is left track. Include right track.
int nWhichTrack = mCurTrackNum;
double frac = (sourceSampleCount - start) / len;
if (frac < 0.5)
frac *= 2.0; // Show twice as far for each track, because we're doing 2 at once.
else
{
nWhichTrack++;
frac -= 0.5;
frac *= 2.0; // Show twice as far for each track, because we're doing 2 at once.
}
if (TrackProgress(nWhichTrack, frac))
return false;
}
// Tell SoundTouch to finish processing any remaining samples
mSoundTouch->flush();
unsigned int outputCount = mSoundTouch->numSamples();
if (outputCount > 0)
this->ProcessStereoResults(outputCount, outputLeftTrack, outputRightTrack);
// Flush the output WaveTracks (since they're buffered, too)
outputLeftTrack->Flush();
outputRightTrack->Flush();
// Clean up the buffers.
delete [] leftBuffer;
delete [] rightBuffer;
delete [] soundTouchBuffer;
// Take the output tracks and insert in place of the original
// sample data.
leftTrack->ClearAndPaste(mCurT0, mCurT1, outputLeftTrack, true, false, GetTimeWarper());
rightTrack->ClearAndPaste(mCurT0, mCurT1, outputRightTrack, true, false, GetTimeWarper());
// Track the longest result length
double newLength = outputLeftTrack->GetEndTime();
m_maxNewLength = wxMax(m_maxNewLength, newLength);
newLength = outputRightTrack->GetEndTime();
m_maxNewLength = wxMax(m_maxNewLength, newLength);
// Delete the outputTracks now that their data are inserted in place.
delete outputLeftTrack;
delete outputRightTrack;
//Return true because the effect processing succeeded.
return true;
}
bool EffectSoundTouch::ProcessStereoResults(const unsigned int outputCount,
WaveTrack* outputLeftTrack,
WaveTrack* outputRightTrack)
{
float* outputSoundTouchBuffer = new float[outputCount*2];
mSoundTouch->receiveSamples(outputSoundTouchBuffer, outputCount);
// Dis-interleave outputSoundTouchBuffer into separate track buffers.
float* outputLeftBuffer = new float[outputCount];
float* outputRightBuffer = new float[outputCount];
for (unsigned int index = 0; index < outputCount; index++)
{
outputLeftBuffer[index] = outputSoundTouchBuffer[index*2];
outputRightBuffer[index] = outputSoundTouchBuffer[(index*2)+1];
}
outputLeftTrack->Append((samplePtr)outputLeftBuffer, floatSample, outputCount);
outputRightTrack->Append((samplePtr)outputRightBuffer, floatSample, outputCount);
delete[] outputSoundTouchBuffer;
delete[] outputLeftBuffer;
delete[] outputRightBuffer;
return true;
}
#endif // USE_SOUNDTOUCH<|fim▁end|>
| |
<|file_name|>ui_fontgenerator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'qt\ui\fontgen.ui'
#
# Created: Mon Jun 03 01:17:17 2013
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_FontGenerator(object):
def setupUi(self, FontGenerator):
FontGenerator.setObjectName(_fromUtf8("FontGenerator"))
FontGenerator.resize(570, 493)
FontGenerator.setWindowTitle(QtGui.QApplication.translate("FontGenerator", "Font Generator - untitled[*]", None, QtGui.QApplication.UnicodeUTF8))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/monokuma-green.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
FontGenerator.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(FontGenerator)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.btnNew = QtGui.QPushButton(FontGenerator)
self.btnNew.setText(QtGui.QApplication.translate("FontGenerator", "&New", None, QtGui.QApplication.UnicodeUTF8))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/report.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnNew.setIcon(icon1)
self.btnNew.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+N", None, QtGui.QApplication.UnicodeUTF8))
self.btnNew.setAutoDefault(False)
self.btnNew.setObjectName(_fromUtf8("btnNew"))
self.horizontalLayout_2.addWidget(self.btnNew)
self.btnSave = QtGui.QPushButton(FontGenerator)
self.btnSave.setText(QtGui.QApplication.translate("FontGenerator", "&Save", None, QtGui.QApplication.UnicodeUTF8))
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/disk.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnSave.setIcon(icon2)
self.btnSave.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+S", None, QtGui.QApplication.UnicodeUTF8))
self.btnSave.setAutoDefault(False)
self.btnSave.setObjectName(_fromUtf8("btnSave"))
self.horizontalLayout_2.addWidget(self.btnSave)
self.btnSaveAs = QtGui.QPushButton(FontGenerator)
self.btnSaveAs.setText(QtGui.QApplication.translate("FontGenerator", "Save As...", None, QtGui.QApplication.UnicodeUTF8))
self.btnSaveAs.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+Shift+S", None, QtGui.QApplication.UnicodeUTF8))
self.btnSaveAs.setAutoDefault(False)
self.btnSaveAs.setObjectName(_fromUtf8("btnSaveAs"))<|fim▁hole|> icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/folder.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnLoad.setIcon(icon3)
self.btnLoad.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+O", None, QtGui.QApplication.UnicodeUTF8))
self.btnLoad.setAutoDefault(False)
self.btnLoad.setObjectName(_fromUtf8("btnLoad"))
self.horizontalLayout_2.addWidget(self.btnLoad)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.btnGenerateFont = QtGui.QPushButton(FontGenerator)
self.btnGenerateFont.setText(QtGui.QApplication.translate("FontGenerator", "&Generate", None, QtGui.QApplication.UnicodeUTF8))
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/cog.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnGenerateFont.setIcon(icon4)
self.btnGenerateFont.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+G", None, QtGui.QApplication.UnicodeUTF8))
self.btnGenerateFont.setAutoDefault(False)
self.btnGenerateFont.setObjectName(_fromUtf8("btnGenerateFont"))
self.horizontalLayout_2.addWidget(self.btnGenerateFont)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.line = QtGui.QFrame(FontGenerator)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout.addWidget(self.line)
self.tabFonts = QtGui.QTabWidget(FontGenerator)
self.tabFonts.setTabsClosable(False)
self.tabFonts.setMovable(True)
self.tabFonts.setObjectName(_fromUtf8("tabFonts"))
self.verticalLayout.addWidget(self.tabFonts)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.btnNewTab = QtGui.QPushButton(FontGenerator)
self.btnNewTab.setText(QtGui.QApplication.translate("FontGenerator", "Add Tab", None, QtGui.QApplication.UnicodeUTF8))
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/add.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnNewTab.setIcon(icon5)
self.btnNewTab.setAutoDefault(False)
self.btnNewTab.setObjectName(_fromUtf8("btnNewTab"))
self.horizontalLayout.addWidget(self.btnNewTab)
self.btnRemoveTab = QtGui.QPushButton(FontGenerator)
self.btnRemoveTab.setText(QtGui.QApplication.translate("FontGenerator", "Remove", None, QtGui.QApplication.UnicodeUTF8))
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/delete.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnRemoveTab.setIcon(icon6)
self.btnRemoveTab.setAutoDefault(False)
self.btnRemoveTab.setObjectName(_fromUtf8("btnRemoveTab"))
self.horizontalLayout.addWidget(self.btnRemoveTab)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout)
self.groupBox = QtGui.QGroupBox(FontGenerator)
self.groupBox.setTitle(QtGui.QApplication.translate("FontGenerator", "Export", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.horizontalLayout_3 = QtGui.QHBoxLayout(self.groupBox)
self.horizontalLayout_3.setContentsMargins(-1, 4, -1, 8)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.chkGenForGame = QtGui.QCheckBox(self.groupBox)
self.chkGenForGame.setText(QtGui.QApplication.translate("FontGenerator", "Export to umdimage2", None, QtGui.QApplication.UnicodeUTF8))
self.chkGenForGame.setChecked(True)
self.chkGenForGame.setObjectName(_fromUtf8("chkGenForGame"))
self.verticalLayout_2.addWidget(self.chkGenForGame)
self.chkGenForEditor = QtGui.QCheckBox(self.groupBox)
self.chkGenForEditor.setText(QtGui.QApplication.translate("FontGenerator", "Export to editor GFX dir", None, QtGui.QApplication.UnicodeUTF8))
self.chkGenForEditor.setChecked(True)
self.chkGenForEditor.setObjectName(_fromUtf8("chkGenForEditor"))
self.verticalLayout_2.addWidget(self.chkGenForEditor)
self.horizontalLayout_3.addLayout(self.verticalLayout_2)
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.rdoGenFont1 = QtGui.QRadioButton(self.groupBox)
self.rdoGenFont1.setText(QtGui.QApplication.translate("FontGenerator", "Font 01 (regular text)", None, QtGui.QApplication.UnicodeUTF8))
self.rdoGenFont1.setChecked(True)
self.rdoGenFont1.setObjectName(_fromUtf8("rdoGenFont1"))
self.verticalLayout_3.addWidget(self.rdoGenFont1)
self.rdoGenFont2 = QtGui.QRadioButton(self.groupBox)
self.rdoGenFont2.setText(QtGui.QApplication.translate("FontGenerator", "Font 02 (Class Trial minigame text)", None, QtGui.QApplication.UnicodeUTF8))
self.rdoGenFont2.setChecked(False)
self.rdoGenFont2.setObjectName(_fromUtf8("rdoGenFont2"))
self.verticalLayout_3.addWidget(self.rdoGenFont2)
self.horizontalLayout_3.addLayout(self.verticalLayout_3)
self.groupBox_2 = QtGui.QGroupBox(self.groupBox)
self.groupBox_2.setTitle(QtGui.QApplication.translate("FontGenerator", "Tab Priority", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.horizontalLayout_6 = QtGui.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_6.setContentsMargins(-1, 4, -1, 8)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.rdoLeftToRight = QtGui.QRadioButton(self.groupBox_2)
self.rdoLeftToRight.setText(QtGui.QApplication.translate("FontGenerator", "Left to right", None, QtGui.QApplication.UnicodeUTF8))
self.rdoLeftToRight.setChecked(True)
self.rdoLeftToRight.setObjectName(_fromUtf8("rdoLeftToRight"))
self.horizontalLayout_6.addWidget(self.rdoLeftToRight)
self.rdoRightToLeft = QtGui.QRadioButton(self.groupBox_2)
self.rdoRightToLeft.setText(QtGui.QApplication.translate("FontGenerator", "Right to left", None, QtGui.QApplication.UnicodeUTF8))
self.rdoRightToLeft.setObjectName(_fromUtf8("rdoRightToLeft"))
self.horizontalLayout_6.addWidget(self.rdoRightToLeft)
self.horizontalLayout_3.addWidget(self.groupBox_2)
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem2)
self.verticalLayout.addWidget(self.groupBox)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem3)
self.btnClose = QtGui.QPushButton(FontGenerator)
self.btnClose.setText(QtGui.QApplication.translate("FontGenerator", "Close", None, QtGui.QApplication.UnicodeUTF8))
self.btnClose.setAutoDefault(False)
self.btnClose.setObjectName(_fromUtf8("btnClose"))
self.horizontalLayout_4.addWidget(self.btnClose)
self.verticalLayout.addLayout(self.horizontalLayout_4)
self.retranslateUi(FontGenerator)
self.tabFonts.setCurrentIndex(-1)
QtCore.QObject.connect(self.btnNewTab, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.add_tab)
QtCore.QObject.connect(self.btnRemoveTab, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.remove_tab)
QtCore.QObject.connect(self.btnGenerateFont, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.generate_font)
QtCore.QObject.connect(self.btnClose, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.accept)
QtCore.QObject.connect(self.chkGenForGame, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.chkGenForEditor, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.rdoGenFont1, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.rdoGenFont2, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.rdoLeftToRight, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.rdoRightToLeft, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
QtCore.QObject.connect(self.btnNew, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.new_clicked)
QtCore.QObject.connect(self.btnSave, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.save_clicked)
QtCore.QObject.connect(self.btnSaveAs, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.save_as_clicked)
QtCore.QObject.connect(self.btnLoad, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.load_clicked)
QtCore.QMetaObject.connectSlotsByName(FontGenerator)
FontGenerator.setTabOrder(self.tabFonts, self.btnNewTab)
FontGenerator.setTabOrder(self.btnNewTab, self.btnRemoveTab)
FontGenerator.setTabOrder(self.btnRemoveTab, self.chkGenForGame)
FontGenerator.setTabOrder(self.chkGenForGame, self.chkGenForEditor)
FontGenerator.setTabOrder(self.chkGenForEditor, self.rdoGenFont1)
FontGenerator.setTabOrder(self.rdoGenFont1, self.rdoGenFont2)
FontGenerator.setTabOrder(self.rdoGenFont2, self.rdoLeftToRight)
FontGenerator.setTabOrder(self.rdoLeftToRight, self.rdoRightToLeft)
FontGenerator.setTabOrder(self.rdoRightToLeft, self.btnNew)
FontGenerator.setTabOrder(self.btnNew, self.btnSave)
FontGenerator.setTabOrder(self.btnSave, self.btnSaveAs)
FontGenerator.setTabOrder(self.btnSaveAs, self.btnLoad)
FontGenerator.setTabOrder(self.btnLoad, self.btnGenerateFont)
FontGenerator.setTabOrder(self.btnGenerateFont, self.btnClose)
def retranslateUi(self, FontGenerator):
pass
import icons_rc<|fim▁end|>
|
self.horizontalLayout_2.addWidget(self.btnSaveAs)
self.btnLoad = QtGui.QPushButton(FontGenerator)
self.btnLoad.setText(QtGui.QApplication.translate("FontGenerator", "&Open", None, QtGui.QApplication.UnicodeUTF8))
|
<|file_name|>negative_momentum_train_hook.py<|end_file_name|><|fim▁begin|>import torch
from hypergan.train_hooks.base_train_hook import BaseTrainHook
class NegativeMomentumTrainHook(BaseTrainHook):
def __init__(self, gan=None, config=None, trainer=None):
super().__init__(config=config, gan=gan, trainer=trainer)
self.d_grads = None
self.g_grads = None
def gradients(self, d_grads, g_grads):
if self.d_grads is None:
self.d_grads = [torch.zeros_like(_g) for _g in d_grads]
self.g_grads = [torch.zeros_like(_g) for _g in g_grads]
new_d_grads = [g.clone() for g in d_grads]
new_g_grads = [g.clone() for g in g_grads]<|fim▁hole|> self.g_grads = new_g_grads
return [d_grads, g_grads]<|fim▁end|>
|
d_grads = [_g - self.config.gamma * _g2 for _g, _g2 in zip(d_grads, self.d_grads)]
g_grads = [_g - self.config.gamma * _g2 for _g, _g2 in zip(g_grads, self.g_grads)]
self.d_grads = new_d_grads
|
<|file_name|>testingParser.py<|end_file_name|><|fim▁begin|>import parser
import logging
def test(code):
log = logging.getLogger()
parser.parser.parse(code, tracking=True)
print "Programa con 1 var y 1 asignacion bien: "
s = "program id; var beto: int; { id = 1234; }"<|fim▁hole|>print "Original: \n{0}".format(s)
print "\n"
print "Programa con 1 var mal: "
s = "program ; var beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa sin vars bien: "
s = "program id; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto: int { id = 1234; }"
test(s);
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque vacio bien: "
s = "program id; var beto: int; { }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque lleno y estatuto mal: "
s = "program id; var beto: int; { id = 1234; id2 = 12345 }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque lleno y condicion mal: "
s = "program id; var beto: int; { id = 1234; if ( 8 > 3 ) { id3 = 34234; } else { } }"
test(s)
print "\n"
print "Original: \n{0}".format(s)<|fim▁end|>
|
test(s)
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)]
#![plugin(rocket_codegen)]
#![feature(custom_derive)]
extern crate chrono;
extern crate rocket;
extern crate postgres;
extern crate serde_json;
extern crate rocket_contrib;
#[macro_use] extern crate serde_derive;
mod webapp_config;
mod date_field;
use std::env;
use std::path::{Path, PathBuf};
use postgres::{Connection, TlsMode};
use chrono::{DateTime, UTC};
use rocket_contrib::JSON;
use rocket::response::{content, NamedFile};
#[macro_use]
extern crate lazy_static;
lazy_static!{
static ref DATABASE_URI: String = env::var("GDQ_DATABASE_URI").unwrap();
static ref CURRENT_EVENT_ID: String = env::var("GDQ_LIVE_EVENT_ID").unwrap();
}
static DONATAION_DATA_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 ORDER BY timestamp ASC";
static DONATAION_DATA_UPDATE_QUERY: &'static str = "SELECT id, timestamp, donation_count, donation_total, historic_total FROM DonationEntry WHERE event_id = $1 AND timestamp > $2 ORDER BY timestamp ASC";
#[derive(Serialize)]
struct DonationEntry {
timestamp: DateTime<UTC>,
count: i32,
total: i32,
total_2016: i32,
}
#[derive(Serialize)]
struct DataResponse(Vec<DonationEntry>);
#[derive(FromForm)]
struct DonationQuery {
since: date_field::DateField,
}<|fim▁hole|> let response_str = format!(include_str!("index.html"), static_base=webapp_config::get_static_base());
content::HTML(response_str)
}
#[get("/donation_data")]
fn get_donation_data() -> JSON<DataResponse> {
let database_uri: &str = &*DATABASE_URI;
let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
let query_result = db_connection.query(DONATAION_DATA_QUERY, &[&*CURRENT_EVENT_ID]).unwrap();
let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
JSON(DataResponse(result))
}
#[get("/donation_data/update?<update_form>")]
fn get_donation_data_update(update_form: DonationQuery) -> JSON<DataResponse> {
let database_uri: &str = &*DATABASE_URI;
let db_connection = Connection::connect(database_uri, TlsMode::None).unwrap();
let date_field::DateField(since_date) = update_form.since;
let query_result = db_connection.query(DONATAION_DATA_UPDATE_QUERY, &[&*CURRENT_EVENT_ID, &since_date]).unwrap();
let result: Vec<DonationEntry> = query_result.iter().map(|row| DonationEntry { timestamp: row.get(1), count: row.get(2), total: row.get(3), total_2016: row.get(4) }).collect();
JSON(DataResponse(result))
}
#[get("/static/<file..>")]
fn static_files(file: PathBuf) -> Option<NamedFile> {
NamedFile::open(Path::new("static/").join(file)).ok()
}
fn main() {
if webapp_config::use_local_static_handler() {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update, static_files]).launch()
} else {
rocket::ignite().mount("/", routes![index, get_donation_data, get_donation_data_update]).launch()
}
}<|fim▁end|>
|
#[get("/")]
fn index() -> content::HTML<String> {
|
<|file_name|>commentOnArrayElement1.ts<|end_file_name|><|fim▁begin|>const array = [
/* element 1*/
1
/* end of element 1 */,
2
<|fim▁hole|><|fim▁end|>
|
/* end of element 2 */
];
|
<|file_name|>angular-exec.js<|end_file_name|><|fim▁begin|>// @flow
//
// Copyright (c) 2018 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
import highland from "highland";
import angular from "angular";
import { querySelector } from "./dom-utils.js";
import type { HighlandStreamT } from "highland";
type stateServiceT = {
go: (name: string) => void
};
type cacheT = {
$state?: stateServiceT
};
const cache: cacheT = {};
type servicesT = "$state";
type methodsT = "go";
export default <R>(service: servicesT, method: methodsT, ...args: any[]): HighlandStreamT<R> => {
const s: HighlandStreamT<R> = highland();
const inj = angular.element(querySelector(document, "body")).injector();
function loop() {
if (cache[service]) {
const fn: Function = cache[service][method];<|fim▁hole|> const fn: Function = svc[method];
cache[service] = svc;
s.write(fn(...args));
s.end();
} else {
setTimeout(loop, 0);
}
}
loop();
return s;
};<|fim▁end|>
|
s.write(fn(...args));
s.end();
} else if (inj.has(service)) {
const svc = inj.get(service);
|
<|file_name|>legacyconfig_conversion.go<|end_file_name|><|fim▁begin|>package configconversion
import (
"net"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/serializer"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"fmt"
configv1 "github.com/openshift/api/config/v1"
kubecontrolplanev1 "github.com/openshift/api/kubecontrolplane/v1"
legacyconfigv1 "github.com/openshift/api/legacyconfig/v1"
openshiftcontrolplanev1 "github.com/openshift/api/openshiftcontrolplane/v1"
externaliprangerv1 "github.com/openshift/origin/pkg/service/admission/apis/externalipranger/v1"
restrictedendpointsv1 "github.com/openshift/origin/pkg/service/admission/apis/restrictedendpoints/v1"
)
func convertNetworkConfigToAdmissionConfig(masterConfig *legacyconfigv1.MasterConfig) error {
if masterConfig.AdmissionConfig.PluginConfig == nil {
masterConfig.AdmissionConfig.PluginConfig = map[string]*legacyconfigv1.AdmissionPluginConfig{}
}
scheme := runtime.NewScheme()
utilruntime.Must(externaliprangerv1.InstallLegacy(scheme))
utilruntime.Must(restrictedendpointsv1.InstallLegacy(scheme))
codecs := serializer.NewCodecFactory(scheme)
encoder := codecs.LegacyCodec(externaliprangerv1.SchemeGroupVersion, restrictedendpointsv1.SchemeGroupVersion)
// convert the networkconfig to admissionconfig
var restricted []string
restricted = append(restricted, masterConfig.NetworkConfig.ServiceNetworkCIDR)
for _, cidr := range masterConfig.NetworkConfig.ClusterNetworks {
restricted = append(restricted, cidr.CIDR)
}
restrictedEndpointConfig := &restrictedendpointsv1.RestrictedEndpointsAdmissionConfig{
RestrictedCIDRs: restricted,
}
restrictedEndpointConfigContent, err := runtime.Encode(encoder, restrictedEndpointConfig)
if err != nil {
return err
}
masterConfig.AdmissionConfig.PluginConfig["openshift.io/RestrictedEndpointsAdmission"] = &legacyconfigv1.AdmissionPluginConfig{
Configuration: runtime.RawExtension{Raw: restrictedEndpointConfigContent},
}
allowIngressIP := false
if _, ipNet, err := net.ParseCIDR(masterConfig.NetworkConfig.IngressIPNetworkCIDR); err == nil && !ipNet.IP.IsUnspecified() {
allowIngressIP = true
}
externalIPRangerAdmissionConfig := &externaliprangerv1.ExternalIPRangerAdmissionConfig{
ExternalIPNetworkCIDRs: masterConfig.NetworkConfig.ExternalIPNetworkCIDRs,
AllowIngressIP: allowIngressIP,
}
externalIPRangerAdmissionConfigContent, err := runtime.Encode(encoder, externalIPRangerAdmissionConfig)
if err != nil {
return err
}
masterConfig.AdmissionConfig.PluginConfig["ExternalIPRanger"] = &legacyconfigv1.AdmissionPluginConfig{
Configuration: runtime.RawExtension{Raw: externalIPRangerAdmissionConfigContent},<|fim▁hole|> return nil
}
// ConvertMasterConfigToKubeAPIServerConfig mutates it's input. This is acceptable because we do not need it by the time we get to 4.0.
func ConvertMasterConfigToKubeAPIServerConfig(input *legacyconfigv1.MasterConfig) (*kubecontrolplanev1.KubeAPIServerConfig, error) {
if err := convertNetworkConfigToAdmissionConfig(input); err != nil {
return nil, err
}
var err error
ret := &kubecontrolplanev1.KubeAPIServerConfig{
GenericAPIServerConfig: configv1.GenericAPIServerConfig{
CORSAllowedOrigins: input.CORSAllowedOrigins,
StorageConfig: configv1.EtcdStorageConfig{
StoragePrefix: input.EtcdStorageConfig.OpenShiftStoragePrefix,
},
},
ServicesSubnet: input.KubernetesMasterConfig.ServicesSubnet,
ServicesNodePortRange: input.KubernetesMasterConfig.ServicesNodePortRange,
ImagePolicyConfig: kubecontrolplanev1.KubeAPIServerImagePolicyConfig{
InternalRegistryHostname: input.ImagePolicyConfig.InternalRegistryHostname,
ExternalRegistryHostname: input.ImagePolicyConfig.ExternalRegistryHostname,
},
ProjectConfig: kubecontrolplanev1.KubeAPIServerProjectConfig{
DefaultNodeSelector: input.ProjectConfig.DefaultNodeSelector,
},
ServiceAccountPublicKeyFiles: input.ServiceAccountConfig.PublicKeyFiles,
// TODO this needs to be removed.
APIServerArguments: map[string]kubecontrolplanev1.Arguments{},
}
for k, v := range input.KubernetesMasterConfig.APIServerArguments {
ret.APIServerArguments[k] = v
}
// TODO this is likely to be a little weird. I think we override most of this in the operator
ret.ServingInfo, err = ToHTTPServingInfo(&input.ServingInfo)
if err != nil {
return nil, err
}
ret.AuditConfig, err = ToAuditConfig(&input.AuditConfig)
if err != nil {
return nil, err
}
ret.StorageConfig.EtcdConnectionInfo, err = ToEtcdConnectionInfo(&input.EtcdClientInfo)
if err != nil {
return nil, err
}
ret.OAuthConfig, err = ToOAuthConfig(input.OAuthConfig)
if err != nil {
return nil, err
}
ret.AuthConfig, err = ToMasterAuthConfig(&input.AuthConfig)
if err != nil {
return nil, err
}
ret.AggregatorConfig, err = ToAggregatorConfig(&input.AggregatorConfig)
if err != nil {
return nil, err
}
ret.KubeletClientInfo, err = ToKubeletConnectionInfo(&input.KubeletClientInfo)
if err != nil {
return nil, err
}
ret.AdmissionPluginConfig, err = ToAdmissionPluginConfigMap(input.AdmissionConfig.PluginConfig)
if err != nil {
return nil, err
}
ret.UserAgentMatchingConfig, err = ToUserAgentMatchingConfig(&input.PolicyConfig.UserAgentMatchingConfig)
if err != nil {
return nil, err
}
return ret, nil
}
// ConvertMasterConfigToKubeAPIServerConfig mutates it's input. This is acceptable because we do not need it by the time we get to 4.0.
func ConvertMasterConfigToOpenShiftAPIServerConfig(input *legacyconfigv1.MasterConfig) (*openshiftcontrolplanev1.OpenShiftAPIServerConfig, error) {
var err error
ret := &openshiftcontrolplanev1.OpenShiftAPIServerConfig{
GenericAPIServerConfig: configv1.GenericAPIServerConfig{
CORSAllowedOrigins: input.CORSAllowedOrigins,
StorageConfig: configv1.EtcdStorageConfig{
StoragePrefix: input.EtcdStorageConfig.OpenShiftStoragePrefix,
},
},
ImagePolicyConfig: openshiftcontrolplanev1.ImagePolicyConfig{
MaxImagesBulkImportedPerRepository: input.ImagePolicyConfig.MaxImagesBulkImportedPerRepository,
InternalRegistryHostname: input.ImagePolicyConfig.InternalRegistryHostname,
ExternalRegistryHostname: input.ImagePolicyConfig.ExternalRegistryHostname,
AdditionalTrustedCA: input.ImagePolicyConfig.AdditionalTrustedCA,
},
ProjectConfig: openshiftcontrolplanev1.ProjectConfig{
DefaultNodeSelector: input.ProjectConfig.DefaultNodeSelector,
ProjectRequestMessage: input.ProjectConfig.ProjectRequestMessage,
ProjectRequestTemplate: input.ProjectConfig.ProjectRequestTemplate,
},
RoutingConfig: openshiftcontrolplanev1.RoutingConfig{
Subdomain: input.RoutingConfig.Subdomain,
},
// TODO this needs to be removed.
APIServerArguments: map[string][]string{},
}
for k, v := range input.KubernetesMasterConfig.APIServerArguments {
ret.APIServerArguments[k] = v
}
// TODO this is likely to be a little weird. I think we override most of this in the operator
ret.ServingInfo, err = ToHTTPServingInfo(&input.ServingInfo)
if err != nil {
return nil, err
}
ret.KubeClientConfig, err = ToKubeClientConfig(&input.MasterClients)
if err != nil {
return nil, err
}
ret.AuditConfig, err = ToAuditConfig(&input.AuditConfig)
if err != nil {
return nil, err
}
ret.StorageConfig.EtcdConnectionInfo, err = ToEtcdConnectionInfo(&input.EtcdClientInfo)
if err != nil {
return nil, err
}
ret.AdmissionPluginConfig, err = ToAdmissionPluginConfigMap(input.AdmissionConfig.PluginConfig)
if err != nil {
return nil, err
}
ret.ImagePolicyConfig.AllowedRegistriesForImport, err = ToAllowedRegistries(input.ImagePolicyConfig.AllowedRegistriesForImport)
if err != nil {
return nil, err
}
if input.OAuthConfig != nil {
ret.ServiceAccountOAuthGrantMethod = openshiftcontrolplanev1.GrantHandlerType(string(input.OAuthConfig.GrantConfig.ServiceAccountMethod))
}
ret.JenkinsPipelineConfig, err = ToJenkinsPipelineConfig(&input.JenkinsPipelineConfig)
if err != nil {
return nil, err
}
if filenames, ok := input.KubernetesMasterConfig.APIServerArguments["cloud-config"]; ok {
if len(filenames) != 1 {
return nil, fmt.Errorf(`one or zero "--cloud-config" required, not %v`, filenames)
}
ret.CloudProviderFile = filenames[0]
}
return ret, nil
}<|fim▁end|>
|
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2016/11/23 16:15
# @Author : xycfree<|fim▁hole|>import os<|fim▁end|>
|
# @Link : http://example.org
# @Version : $
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// ES2015 사용을 위한 babel 모듈 호출
import 'babel-polyfill';
// 전역 변수 객체 호출
import globalConfig from './helpers/global-config';
// npm 모듈 호출
import mobileDetect from 'mobile-detect';
//import scroll from 'scroll';
//import ease from 'ease-component';
import detectScrollPageObj from 'scroll-doc';
// devTools 호출
import devTools from './devtools/dev-tools';
import mirror from './devtools/mirror';
import preview from './devtools/preview';
// 헬퍼 모듈 호출
import catchEventTarget from './helpers/catch-event-target';
//import clipboardFunc from './helpers/clipboard-function';
//import cloneObj from './helpers/clone-obj';
//import colorAdjust from './helpers/color-adjust';
//import delayEvent from './helpers/delay-event';
import index from './helpers/index';
//import parents from './helpers/parents';
//import readingZero from './helpers/reading-zero';
//import toggleBoolean from './helpers/toggle-boolean';
import modifier from './helpers/modifier';
//import splitSearch from '../../app_helpers/split-search';
// 프로젝트 모듈 호출
//import {socketFunc} from './project/socket';
//import * as kbs from './project/kbs';
// 전역변수 선언
let socket;
document.addEventListener('DOMContentLoaded', () => {
// 돔 로드완료 이벤트
const WIN = window,
DOC = document,
MD = new mobileDetect(WIN.navigator.userAgent),
detectScrollPage = detectScrollPageObj();
if(MD.mobile()) console.log(`mobile DOM's been loaded`);
else console.log(`DOM's been loaded`);
DOC.addEventListener('click', (e) => {
// 클릭 이벤트 버블링
const eventTarget = catchEventTarget(e.target || e.srcElement);
console.log(eventTarget.target, eventTarget.findJsString);
switch(eventTarget.findJsString) {<|fim▁hole|> 1000,
{
duration: 1000,
ease: ease.inQuint
},
function (error, scrollLeft) {
}
);
modifier(
'toggle',
eventTarget.target,
'paging__elm--actived'
);
break;
default :
return false;
}
}, false);
WIN.addEventListener('load', () => {
// 윈도우 로드완료 이벤트
if(MD.mobile()) console.log(`mobile WINDOW's been loaded`);
else console.log(`WINDOW's been loaded`);
// socket = io();
// socketFunc(socket);
});
WIN.addEventListener('resize', () => {
// 윈도우 리사이즈 이벤트
// delayEvent(/*second*/, /*func*/);
});
WIN.addEventListener('keypress', (e) => {
const pressedKeyCode = e.which;
switch(pressedKeyCode) {
case 0:
// some Function
break;
default :
return false;
}
});
DOC.addEventListener('wheel', (e) => {
const eventTarget = catchEventTarget(e.target || e.srcElement);
switch(eventTarget.findJsString) {
case 'js-test':
console.log(eventTarget.target);
break;
default :
return false;
}
}, true);
DOC.addEventListener('touchstart', (e) => {
let touchObj = e.changedTouches[0];
});
DOC.addEventListener('touchmove', (e) => {
let touchObj = e.changedTouches[0];
});
DOC.addEventListener('touchend', (e) => {
let touchObj = e.changedTouches[0];
});
});<|fim▁end|>
|
case 'js-copy-link' :
console.log(index(eventTarget.target));
scroll.top(
detectScrollPage,
|
<|file_name|>lineinfile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Daniel Hokka Zakrisson <[email protected]>
# (c) 2014, Ahti Kitsik <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: lineinfile
author:
- "Daniel Hokka Zakrissoni (@dhozac)"
- "Ahti Kitsik (@ahtik)"
extends_documentation_fragment:
- files
- validate
short_description: Ensure a particular line is in a file, or replace an
existing line using a back-referenced regular expression.
description:
- This module will search a file for a line, and ensure that it is present or absent.
- This is primarily useful when you want to change a single line in
a file only. See the M(replace) module if you want to change
multiple, similar lines or check M(blockinfile) if you want to insert/update/remove a block of lines in a file.
For other cases, see the M(copy) or M(template) modules.
version_added: "0.7"
options:
dest:
required: true
aliases: [ name, destfile ]
description:
- The file to modify.
regexp:
required: false
version_added: 1.7
description:
- The regular expression to look for in every line of the file. For
C(state=present), the pattern to replace if found; only the last line
found will be replaced. For C(state=absent), the pattern of the line
to remove. Uses Python regular expressions; see
U(http://docs.python.org/2/library/re.html).
state:
required: false
choices: [ present, absent ]
default: "present"
aliases: []
description:
- Whether the line should be there or not.
line:
required: false
description:
- Required for C(state=present). The line to insert/replace into the
file. If C(backrefs) is set, may contain backreferences that will get
expanded with the C(regexp) capture groups if the regexp matches.
backrefs:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.1"
description:
- Used with C(state=present). If set, line can contain backreferences
(both positional and named) that will get populated if the C(regexp)
matches. This flag changes the operation of the module slightly;
C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp)
doesn't match anywhere in the file, the file will be left unchanged.
If the C(regexp) does match, the last matching line will be replaced by
the expanded line parameter.
insertafter:
required: false
default: EOF
description:
- Used with C(state=present). If specified, the line will be inserted
after the last match of specified regular expression. A special value is
available; C(EOF) for inserting the line at the end of the file.
If specified regular expression has no matches, EOF will be used instead.
May not be used with C(backrefs).
choices: [ 'EOF', '*regex*' ]
insertbefore:
required: false
version_added: "1.1"
description:
- Used with C(state=present). If specified, the line will be inserted
before the last match of specified regular expression. A value is
available; C(BOF) for inserting the line at the beginning of the file.
If specified regular expression has no matches, the line will be
inserted at the end of the file. May not be used with C(backrefs).
choices: [ 'BOF', '*regex*' ]
create:
required: false
choices: [ "yes", "no" ]
default: "no"
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. By default it will fail if the file
is missing.
backup:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
others:
description:
- All arguments accepted by the M(file) module also work here.
required: false
"""
EXAMPLES = r"""
- lineinfile:
dest: /etc/selinux/config
regexp: '^SELINUX='
line: 'SELINUX=enforcing'
- lineinfile:
dest: /etc/sudoers
state: absent
regexp: '^%wheel'
- lineinfile:
dest: /etc/hosts
regexp: '^127\.0\.0\.1'
line: '127.0.0.1 localhost'
owner: root
group: root
mode: 0644
- lineinfile:
dest: /etc/httpd/conf/httpd.conf
regexp: '^Listen '
insertafter: '^#Listen '
line: 'Listen 8080'
- lineinfile:
dest: /etc/services
regexp: '^# port for http'
insertbefore: '^www.*80/tcp'
line: '# port for http by default'
# Add a line to a file if it does not exist, without passing regexp
- lineinfile:
dest: /tmp/testfile
line: '192.168.1.99 foo.lab.net foo'
# Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs.
- lineinfile: "
dest: /etc/sudoers
state: present
regexp: '^%wheel'
line: '%wheel ALL=(ALL) NOPASSWD: ALL'
- lineinfile:
dest: /opt/jboss-as/bin/standalone.conf
regexp: '^(.*)Xms(\d+)m(.*)$'
line: '\1Xms${xms}m\3'
backrefs: yes
# Validate the sudoers file before saving
- lineinfile:
dest: /etc/sudoers
state: present
regexp: '^%ADMIN ALL='
line: '%ADMIN ALL=(ALL) NOPASSWD: ALL'
validate: 'visudo -cf %s'
"""
import re
import os
import tempfile
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_native
def write_changes(module, b_lines, dest):
tmpfd, tmpfile = tempfile.mkstemp()
f = os.fdopen(tmpfd, 'wb')
f.writelines(b_lines)
f.close()
validate = module.params.get('validate', None)
valid = not validate
if validate:
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(to_bytes(validate % tmpfile, errors='surrogate_or_strict'))
valid = rc == 0
if rc != 0:
module.fail_json(msg='failed to validate: '
'rc:%s error:%s' % (rc, err))
if valid:
module.atomic_move(tmpfile,
to_native(os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')), errors='surrogate_or_strict'),
unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False, diff=diff):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def present(module, dest, regexp, line, insertafter, insertbefore, create,
backup, backrefs):
diff = {'before': '',
'after': '',
'before_header': '%s (content)' % dest,
'after_header': '%s (content)' % dest}
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if not os.path.exists(b_dest):
if not create:
module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
b_destpath = os.path.dirname(b_dest)
if not os.path.exists(b_destpath) and not module.check_mode:
os.makedirs(b_destpath)
b_lines = []
else:
f = open(b_dest, 'rb')
b_lines = f.readlines()
f.close()
if module._diff:
diff['before'] = to_native(b('').join(b_lines))
if regexp is not None:
bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
if insertafter not in (None, 'BOF', 'EOF'):
bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
elif insertbefore not in (None, 'BOF'):
bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
else:
bre_ins = None
# index[0] is the line num where regexp has been found
# index[1] is the line num where insertafter/inserbefore has been found
index = [-1, -1]
m = None
b_line = to_bytes(line, errors='surrogate_or_strict')
for lineno, b_cur_line in enumerate(b_lines):
if regexp is not None:
match_found = bre_m.search(b_cur_line)
else:
match_found = b_line == b_cur_line.rstrip(b('\r\n'))
if match_found:
index[0] = lineno
m = match_found
elif bre_ins is not None and bre_ins.search(b_cur_line):
if insertafter:
# + 1 for the next line
index[1] = lineno + 1
if insertbefore:
# + 1 for the previous line
index[1] = lineno
msg = ''
changed = False
# Regexp matched a line in the file
b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
if index[0] != -1:
if backrefs:
b_new_line = m.expand(b_line)
else:
# Don't do backref expansion if not asked.
b_new_line = b_line
if not b_new_line.endswith(b_linesep):
b_new_line += b_linesep
if b_lines[index[0]] != b_new_line:
b_lines[index[0]] = b_new_line
msg = 'line replaced'
changed = True
elif backrefs:
# Do absolutely nothing, since it's not safe generating the line
# without the regexp matching to populate the backrefs.
pass
# Add it to the beginning of the file
elif insertbefore == 'BOF' or insertafter == 'BOF':
b_lines.insert(0, b_line + b_linesep)
msg = 'line added'
changed = True
# Add it to the end of the file if requested or
# if insertafter/insertbefore didn't match anything
# (so default behaviour is to add at the end)
elif insertafter == 'EOF' or index[1] == -1:
# If the file is not empty then ensure there's a newline before the added line
if len(b_lines) > 0 and not b_lines[-1][-1:] in (b('\n'), b('\r')):
b_lines.append(b_linesep)
b_lines.append(b_line + b_linesep)
msg = 'line added'
changed = True
# insert* matched, but not the regexp
else:
b_lines.insert(index[1], b_line + b_linesep)
msg = 'line added'
changed = True
if module._diff:
diff['after'] = to_native(b('').join(b_lines))
backupdest = ""
if changed and not module.check_mode:
if backup and os.path.exists(b_dest):
backupdest = module.backup_local(dest)
write_changes(module, b_lines, dest)
if module.check_mode and not os.path.exists(b_dest):
module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)
attr_diff = {}
msg, changed = check_file_attrs(module, changed, msg, attr_diff)
attr_diff['before_header'] = '%s (file attributes)' % dest
attr_diff['after_header'] = '%s (file attributes)' % dest
difflist = [diff, attr_diff]
module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def absent(module, dest, regexp, line, backup):
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if not os.path.exists(b_dest):
module.exit_json(changed=False, msg="file not present")
msg = ''
diff = {'before': '',
'after': '',
'before_header': '%s (content)' % dest,
'after_header': '%s (content)' % dest}
f = open(b_dest, 'rb')
b_lines = f.readlines()
f.close()
if module._diff:
diff['before'] = to_native(b('').join(b_lines))
if regexp is not None:
bre_c = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
found = []
b_line = to_bytes(line, errors='surrogate_or_strict')
def matcher(b_cur_line):
if regexp is not None:
match_found = bre_c.search(b_cur_line)
else:
match_found = b_line == b_cur_line.rstrip(b('\r\n'))
if match_found:
found.append(b_cur_line)
return not match_found
b_lines = [l for l in b_lines if matcher(l)]
changed = len(found) > 0
if module._diff:
diff['after'] = to_native(b('').join(b_lines))
backupdest = ""
if changed and not module.check_mode:
if backup:
backupdest = module.backup_local(dest)
write_changes(module, b_lines, dest)
if changed:
msg = "%s line(s) removed" % len(found)
attr_diff = {}
msg, changed = check_file_attrs(module, changed, msg, attr_diff)
attr_diff['before_header'] = '%s (file attributes)' % dest
attr_diff['after_header'] = '%s (file attributes)' % dest
difflist = [diff, attr_diff]
module.exit_json(changed=changed, found=len(found), msg=msg, backup=backupdest, diff=difflist)
def main():
module = AnsibleModule(
argument_spec=dict(
dest=dict(required=True, aliases=['name', 'destfile'], type='path'),
state=dict(default='present', choices=['absent', 'present']),
regexp=dict(default=None),
line=dict(aliases=['value']),
insertafter=dict(default=None),
insertbefore=dict(default=None),
backrefs=dict(default=False, type='bool'),
create=dict(default=False, type='bool'),
backup=dict(default=False, type='bool'),
validate=dict(default=None, type='str'),
),
mutually_exclusive=[['insertbefore', 'insertafter']],
add_file_common_args=True,
supports_check_mode=True
)
params = module.params
create = params['create']
backup = params['backup']
backrefs = params['backrefs']
dest = params['dest']
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.isdir(b_dest):
module.fail_json(rc=256, msg='Destination %s is a directory !' % dest)
if params['state'] == 'present':
if backrefs and params['regexp'] is None:
module.fail_json(msg='regexp= is required with backrefs=true')
<|fim▁hole|> module.fail_json(msg='line= is required with state=present')
# Deal with the insertafter default value manually, to avoid errors
# because of the mutually_exclusive mechanism.
ins_bef, ins_aft = params['insertbefore'], params['insertafter']
if ins_bef is None and ins_aft is None:
ins_aft = 'EOF'
line = params['line']
present(module, dest, params['regexp'], line,
ins_aft, ins_bef, create, backup, backrefs)
else:
if params['regexp'] is None and params.get('line', None) is None:
module.fail_json(msg='one of line= or regexp= is required with state=absent')
absent(module, dest, params['regexp'], params.get('line', None), backup)
if __name__ == '__main__':
main()<|fim▁end|>
|
if params.get('line', None) is None:
|
<|file_name|>datetime.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> * Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use chrono::{FixedOffset, TimeZone};
use lazy_static::lazy_static;
use mononoke_types::DateTime;
/// Return a `DateTime` corresponding to <year>-01-01 00:00:00 UTC.
pub fn day_1_utc(year: i32) -> DateTime {
DateTime::new(FixedOffset::west(0).ymd(year, 1, 1).and_hms(0, 0, 0))
}
/// Return a `DateTime` corresponding to <year>-01-01 00:00:00 UTC,
/// with the specified offset applied.
pub fn day_1_tz(year: i32, offset: i32) -> DateTime {
DateTime::new(FixedOffset::west(offset).ymd(year, 1, 1).and_hms(0, 0, 0))
}
pub const PST_OFFSET: i32 = 7 * 3600;
lazy_static! {
/// 1970-01-01 00:00:00 UTC.
pub static ref EPOCH_ZERO: DateTime = DateTime::from_timestamp(0, 0).unwrap();
/// 1970-01-01 00:00:00 UTC-07.
pub static ref EPOCH_ZERO_PST: DateTime = DateTime::from_timestamp(0, PST_OFFSET).unwrap();
/// 1900-01-01 00:00:00 UTC.
pub static ref YEAR_1900: DateTime = day_1_utc(1900);
/// 1900-01-01 00:00:00 UTC-07.
pub static ref YEAR_1900_PST: DateTime = day_1_tz(1900, PST_OFFSET);
/// 2000-01-01 00:00:00 UTC.
pub static ref YEAR_2000: DateTime = day_1_utc(2000);
/// 2000-01-01 00:00:00 UTC-07.
pub static ref YEAR_2000_PST: DateTime = day_1_tz(2000, PST_OFFSET);
/// 2100-01-01 00:00:00 UTC.
pub static ref YEAR_2100: DateTime = day_1_utc(2000);
pub static ref YEAR_2100_PST: DateTime = day_1_tz(2100, PST_OFFSET);
}<|fim▁end|>
|
/*
|
<|file_name|>file.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
import inspect
from django import forms
from django.conf import settings as globalsettings
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from django.contrib.admin.sites import site
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.db import models
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from filer.utils.compatibility import truncate_words
from filer.models import File
from filer import settings as filer_settings
import logging
logger = logging.getLogger(__name__)
class AdminFileWidget(ForeignKeyRawIdWidget):<|fim▁hole|> def render(self, name, value, attrs=None):
obj = self.obj_for_value(value)
css_id = attrs.get('id', 'id_image_x')
css_id_thumbnail_img = "%s_thumbnail_img" % css_id
css_id_description_txt = "%s_description_txt" % css_id
related_url = None
if value:
try:
file_obj = File.objects.get(pk=value)
related_url = file_obj.logical_folder.\
get_admin_directory_listing_url_path()
except Exception,e:
# catch exception and manage it. We can re-raise it for debugging
# purposes and/or just logging it, provided user configured
# proper logging configuration
if filer_settings.FILER_ENABLE_LOGGING:
logger.error('Error while rendering file widget: %s',e)
if filer_settings.FILER_DEBUG:
raise e
if not related_url:
related_url = reverse('admin:filer-directory_listing-last')
params = self.url_parameters()
if params:
lookup_url = '?' + '&'.join(
['%s=%s' % (k, v) for k, v in params.items()])
else:
lookup_url = ''
if not 'class' in attrs:
# The JavaScript looks for this hook.
attrs['class'] = 'vForeignKeyRawIdAdminField'
# rendering the super for ForeignKeyRawIdWidget on purpose here because
# we only need the input and none of the other stuff that
# ForeignKeyRawIdWidget adds
hidden_input = super(ForeignKeyRawIdWidget, self).render(
name, value, attrs)
filer_static_prefix = filer_settings.FILER_STATICMEDIA_PREFIX
if not filer_static_prefix[-1] == '/':
filer_static_prefix += '/'
context = {
'hidden_input': hidden_input,
'lookup_url': '%s%s' % (related_url, lookup_url),
'thumb_id': css_id_thumbnail_img,
'span_id': css_id_description_txt,
'object': obj,
'lookup_name': name,
'filer_static_prefix': filer_static_prefix,
'clear_id': '%s_clear' % css_id,
'id': css_id,
}
html = render_to_string('admin/filer/widgets/admin_file.html', context)
return mark_safe(html)
def label_for_value(self, value):
obj = self.obj_for_value(value)
return ' <strong>%s</strong>' % truncate_words(obj, 14)
def obj_for_value(self, value):
try:
key = self.rel.get_related_field().name
obj = self.rel.to._default_manager.get(**{key: value})
except:
obj = None
return obj
class Media:
js = (filer_settings.FILER_STATICMEDIA_PREFIX + 'js/popup_handling.js',)
class AdminFileFormField(forms.ModelChoiceField):
widget = AdminFileWidget
def __init__(self, rel, queryset, to_field_name, *args, **kwargs):
self.rel = rel
self.queryset = queryset
self.to_field_name = to_field_name
self.max_value = None
self.min_value = None
other_widget = kwargs.pop('widget', None)
if 'admin_site' in inspect.getargspec(self.widget.__init__)[0]: # Django 1.4
widget_instance = self.widget(rel, site)
else: # Django <= 1.3
widget_instance = self.widget(rel)
forms.Field.__init__(self, widget=widget_instance, *args, **kwargs)
def widget_attrs(self, widget):
widget.required = self.required
return {}
class FilerFileField(models.ForeignKey):
default_form_class = AdminFileFormField
default_model_class = File
def __init__(self, **kwargs):
# we call ForeignKey.__init__ with the Image model as parameter...
# a FilerImageFiled can only be a ForeignKey to a Image
return super(FilerFileField, self).__init__(
self.default_model_class, **kwargs)
def formfield(self, **kwargs):
# This is a fairly standard way to set up some defaults
# while letting the caller override them.
defaults = {
'form_class': self.default_form_class,
'rel': self.rel,
}
defaults.update(kwargs)
return super(FilerFileField, self).formfield(**defaults)
def south_field_triple(self):
"Returns a suitable description of this field for South."
# We'll just introspect ourselves, since we inherit.
from south.modelsinspector import introspector
field_class = "django.db.models.fields.related.ForeignKey"
args, kwargs = introspector(self)
# That's our definition!
return (field_class, args, kwargs)<|fim▁end|>
|
choices = None
|
<|file_name|>check_const.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::def::*;
use middle::ty;
use middle::typeck;
use util::ppaux;
use syntax::ast::*;
use syntax::ast_util;
use syntax::visit::Visitor;
use syntax::visit;
struct CheckCrateVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
in_const: bool
}
<|fim▁hole|> let was_const = self.in_const;
self.in_const = in_const;
f(self);
self.in_const = was_const;
}
fn inside_const(&mut self, f: |&mut CheckCrateVisitor<'a, 'tcx>|) {
self.with_const(true, f);
}
fn outside_const(&mut self, f: |&mut CheckCrateVisitor<'a, 'tcx>|) {
self.with_const(false, f);
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &Item) {
check_item(self, i);
}
fn visit_pat(&mut self, p: &Pat) {
check_pat(self, p);
}
fn visit_expr(&mut self, ex: &Expr) {
if check_expr(self, ex) {
visit::walk_expr(self, ex);
}
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, in_const: false },
tcx.map.krate());
tcx.sess.abort_if_errors();
}
fn check_item(v: &mut CheckCrateVisitor, it: &Item) {
match it.node {
ItemStatic(_, _, ref ex) |
ItemConst(_, ref ex) => {
v.inside_const(|v| v.visit_expr(&**ex));
}
ItemEnum(ref enum_definition, _) => {
for var in (*enum_definition).variants.iter() {
for ex in var.node.disr_expr.iter() {
v.inside_const(|v| v.visit_expr(&**ex));
}
}
}
_ => v.outside_const(|v| visit::walk_item(v, it))
}
}
fn check_pat(v: &mut CheckCrateVisitor, p: &Pat) {
fn is_str(e: &Expr) -> bool {
match e.node {
ExprBox(_, ref expr) => {
match expr.node {
ExprLit(ref lit) => ast_util::lit_is_str(&**lit),
_ => false,
}
}
_ => false,
}
}
match p.node {
// Let through plain ~-string literals here
PatLit(ref a) => if !is_str(&**a) { v.inside_const(|v| v.visit_expr(&**a)); },
PatRange(ref a, ref b) => {
if !is_str(&**a) { v.inside_const(|v| v.visit_expr(&**a)); }
if !is_str(&**b) { v.inside_const(|v| v.visit_expr(&**b)); }
}
_ => v.outside_const(|v| visit::walk_pat(v, p))
}
}
fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) -> bool {
if !v.in_const { return true }
match e.node {
ExprUnary(UnDeref, _) => {}
ExprUnary(UnUniq, _) => {
span_err!(v.tcx.sess, e.span, E0010,
"cannot do allocations in constant expressions");
return false;
}
ExprLit(ref lit) if ast_util::lit_is_str(&**lit) => {}
ExprBinary(..) | ExprUnary(..) => {
let method_call = typeck::MethodCall::expr(e.id);
if v.tcx.method_map.borrow().contains_key(&method_call) {
span_err!(v.tcx.sess, e.span, E0011,
"user-defined operators are not allowed in constant \
expressions");
}
}
ExprLit(_) => (),
ExprCast(_, _) => {
let ety = ty::expr_ty(v.tcx, e);
if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in a constant expression",
ppaux::ty_to_string(v.tcx, ety));
}
}
ExprPath(ref pth) => {
// NB: In the future you might wish to relax this slightly
// to handle on-demand instantiation of functions via
// foo::<bar> in a const. Currently that is only done on
// a path in trans::callee that only works in block contexts.
if !pth.segments.iter().all(|segment| segment.types.is_empty()) {
span_err!(v.tcx.sess, e.span, E0013,
"paths in constants may only refer to items without \
type parameters");
}
match v.tcx.def_map.borrow().find(&e.id) {
Some(&DefStatic(..)) |
Some(&DefConst(..)) |
Some(&DefFn(..)) |
Some(&DefVariant(_, _, _)) |
Some(&DefStruct(_)) => { }
Some(&def) => {
debug!("(checking const) found bad def: {}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in constants may only refer to constants \
or functions");
}
None => {
v.tcx.sess.span_bug(e.span, "unbound path in const?!");
}
}
}
ExprCall(ref callee, _) => {
match v.tcx.def_map.borrow().find(&callee.id) {
Some(&DefStruct(..)) |
Some(&DefVariant(..)) => {} // OK.
_ => {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in constants are limited to \
struct and enum constructors");
}
}
}
ExprBlock(ref block) => {
// Check all statements in the block
for stmt in block.stmts.iter() {
let block_span_err = |span|
span_err!(v.tcx.sess, span, E0016,
"blocks in constants are limited to items and \
tail expressions");
match stmt.node {
StmtDecl(ref span, _) => {
match span.node {
DeclLocal(_) => block_span_err(span.span),
// Item statements are allowed
DeclItem(_) => {}
}
}
StmtExpr(ref expr, _) => block_span_err(expr.span),
StmtSemi(ref semi, _) => block_span_err(semi.span),
StmtMac(..) => {
v.tcx.sess.span_bug(e.span, "unexpanded statement \
macro in const?!")
}
}
}
match block.expr {
Some(ref expr) => { check_expr(v, &**expr); }
None => {}
}
}
ExprVec(_) |
ExprAddrOf(MutImmutable, _) |
ExprParen(..) |
ExprField(..) |
ExprTupField(..) |
ExprIndex(..) |
ExprTup(..) |
ExprRepeat(..) |
ExprStruct(..) => {}
ExprAddrOf(_, ref inner) => {
match inner.node {
// Mutable slices are allowed.
ExprVec(_) => {}
_ => span_err!(v.tcx.sess, e.span, E0017,
"references in constants may only refer \
to immutable values")
}
}
_ => {
span_err!(v.tcx.sess, e.span, E0019,
"constant contains unimplemented expression type");
return false;
}
}
true
}<|fim▁end|>
|
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_const(&mut self, in_const: bool, f: |&mut CheckCrateVisitor<'a, 'tcx>|) {
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
See LICENSE file for copyright and license details.
"""
from app import app
from flask import render_template, flash, redirect
#from app.forms import LoginForm
from app.modules.constant import *
@app.route("/")
@app.route("/index")
@app.route("/index/")
@app.route("/<app_profile>/index")
@app.route("/<app_profile>/index/")
@app.route("/<app_profile>")
@app.route("/<app_profile>/")
def index(app_profile = AppProfile.PERSONAL):
"""
Index page
"""
user = { 'login': 'rockwolf' } # fake user
if app_profile == '':
app_profile = 'personal'
return render_template("index.html",
title = 'Central command entity',
user = user,
app_profile = app_profile.lower())
@app.route("/report_finance")
@app.route("/report_finance/")
@app.route("/<app_profile>/report_finance")
@app.route("/<app_profile>/report_finance/")
def report_finance(app_profile = AppProfile.PERSONAL):
"""
Financial reports.
"""
# Make reports per year in pdf (gnucash) and put links to them here.
return('TBD');
@app.route("/trading_journal")
@app.route("/trading_journal/")
@app.route("/<app_profile>/trading_journal")
@app.route("/<app_profile>/trading_journal/")
def trading_journal(app_profile = AppProfile.PERSONAL):
"""
Trading Journal
"""
if app_profile == AppProfile.ZIVLE:
return render_template("trading_journal.html",
title = 'Trading Journal',
user = user,
app_profile = app_profile.lower())
else:
return render_template("404.html",
title = '404')
@app.route("/contact")
@app.route("/contact/")
@app.route("/<app_profile>/contact")
@app.route("/<app_profile>/contact/")
def contact(app_profile = AppProfile.PERSONAL):
"""
Address book.
"""
# Try to sync this with abook? Can abook export them?
return('TBD');
@app.route("/task")
@app.route("/task/")
@app.route("/<app_profile>/task")
<|fim▁hole|>def task(app_profile = AppProfile.PERSONAL):
"""
Task and schedule information.
"""
# TODO: generate output of reminders and put it in a new text-file,
# e.g. remind ~/.reminders -c etc.
# TODO: where to schedule the reminders.txt generation?
if app_profile == AppProfile.ZIVLE:
task_file = TaskFile.ZIVLE
reminder_file = ReminderFile.ZIVLE
elif app_profile == AppProfile.PERSONAL:
task_file = TaskFile.PERSONAL
reminder_file = ReminderFile.PERSONAL
else:
error = true
if not error:
return render_template("task.html",
title = 'Tasks',
user = user,
app_profile = app_profile.lower(),
tasks = load_lines(task_file),
reminders = load_lines(reminder_file)
)
else:
return render_template("404.html",
title = '404')
@app.route('/login', methods = ['GET', 'POST'])
@app.route('/login/', methods = ['GET', 'POST'])
def login():
form = LoginForm()
return render_template('login.html',
title = 'Sign In',
form = form)
@app.route("/links")
@app.route("/links/")
@app.route("/<app_profile>/links")
@app.route("/<app_profile>/links/")
def links(app_profile = AppProfile.PERSONAL):
"""
Link bookmarks.
"""
user = { 'login': 'rockwolf' } # fake user
# Try to read from text-files and build links dynamically
# Format: data/<profile>/links.txt
# Textfile format: <url>;<name>;<description>
#TODO: put links_file in constant.py
#or find a more general way to configure files?
#links_file = 'C:\\Users\\AN\\home\\other\\Dropbox\\cece\\app\\data\\' + app_profile + '\\links.txt'
links_file = '/home/rockwolf/Dropbox/cece/app/data/' + app_profile + '/links.txt'
links_full = load_lines(links_file)
links = []
for link_full in links_full:
links.append(link_full.split(';'))
links.sort(key=lambda k: k[1])
categories = []
for link in links:
if link[1] not in categories:
categories.append(link[1])
return render_template("links.html",
title = 'Bookmarks',
user = user,
app_profile = app_profile.lower(),
categories = categories,
total = len(links),
links = links
)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html',
title = '404'), 404
def load_lines(text_file):
"""
Reads the text file and returns a list of lines.
"""
lines = []
with open(text_file, encoding='utf-8') as text:
for line in text:
lines.append(line.strip())
return lines<|fim▁end|>
|
@app.route("/<app_profile>/task/")
|
<|file_name|>sns.go<|end_file_name|><|fim▁begin|>//
// goamz - Go packages to interact with the Amazon Web Services.
//
// https://wiki.ubuntu.com/goamz
//
// Copyright (c) 2011 Memeo Inc.
//
// Written by Prudhvi Krishna Surapaneni <[email protected]>
// This package is in an experimental state, and does not currently
// follow conventions and style of the rest of goamz or common
// Go conventions. It must be polished before it's considered a
// first-class package in goamz.
package sns
// BUG(niemeyer): Package needs documentation.
import (
"encoding/xml"
"net/http"
"net/url"
"time"
"github.com/goamz/goamz/aws"
)
// The SNS type encapsulates operation with an SNS region.
type SNS struct {
aws.Auth
aws.Region
private byte // Reserve the right of using private data.
}
type AttributeEntry struct {
Key string `xml:"key"`
Value string `xml:"value"`
}
type ResponseMetadata struct {
RequestId string `xml:"ResponseMetadata>RequestId"`
BoxUsage float64 `xml:"ResponseMetadata>BoxUsage"`
}
func New(auth aws.Auth, region aws.Region) *SNS {
return &SNS{auth, region, 0}
}
func makeParams(action string) map[string]string {
params := make(map[string]string)
params["Action"] = action
return params
}
type Error struct {
StatusCode int
Code string
Message string
RequestId string
}
func (err *Error) Error() string {
return err.Message
}
type xmlErrors struct {
RequestId string
Errors []Error `xml:"Errors>Error"`
}
func (sns *SNS) query(params map[string]string, resp interface{}) error {
params["Timestamp"] = time.Now().UTC().Format(time.RFC3339)
u, err := url.Parse(sns.Region.SNSEndpoint)
if err != nil {
return err
}
sign(sns.Auth, "GET", "/", params, u.Host)
u.RawQuery = multimap(params).Encode()
r, err := http.Get(u.String())
if err != nil {
return err
}
defer r.Body.Close()
if r.StatusCode != http.StatusOK {
return buildError(r)
}
err = xml.NewDecoder(r.Body).Decode(resp)
return err
}
func buildError(r *http.Response) error {
errors := xmlErrors{}
xml.NewDecoder(r.Body).Decode(&errors)<|fim▁hole|> err.RequestId = errors.RequestId
err.StatusCode = r.StatusCode
if err.Message == "" {
err.Message = r.Status
}
return &err
}
func multimap(p map[string]string) url.Values {
q := make(url.Values, len(p))
for k, v := range p {
q[k] = []string{v}
}
return q
}<|fim▁end|>
|
var err Error
if len(errors.Errors) > 0 {
err = errors.Errors[0]
}
|
<|file_name|>OperatingSystemVersionConstantServiceSettings.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services;
import com.google.ads.googleads.v9.resources.OperatingSystemVersionConstant;
import com.google.ads.googleads.v9.services.stub.OperatingSystemVersionConstantServiceStubSettings;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link OperatingSystemVersionConstantServiceClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li> The default service address (googleads.googleapis.com) and default port (443) are used.
* <li> Credentials are acquired automatically through Application Default Credentials.
* <li> Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getOperatingSystemVersionConstant to 30 seconds:
*
* <pre>{@code
* OperatingSystemVersionConstantServiceSettings.Builder
* operatingSystemVersionConstantServiceSettingsBuilder =
* OperatingSystemVersionConstantServiceSettings.newBuilder();
* operatingSystemVersionConstantServiceSettingsBuilder
* .getOperatingSystemVersionConstantSettings()
* .setRetrySettings(
* operatingSystemVersionConstantServiceSettingsBuilder
* .getOperatingSystemVersionConstantSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* OperatingSystemVersionConstantServiceSettings operatingSystemVersionConstantServiceSettings =
* operatingSystemVersionConstantServiceSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class OperatingSystemVersionConstantServiceSettings
extends ClientSettings<OperatingSystemVersionConstantServiceSettings> {
/** Returns the object with the settings used for calls to getOperatingSystemVersionConstant. */
public UnaryCallSettings<GetOperatingSystemVersionConstantRequest, OperatingSystemVersionConstant>
getOperatingSystemVersionConstantSettings() {
return ((OperatingSystemVersionConstantServiceStubSettings) getStubSettings())
.getOperatingSystemVersionConstantSettings();
}
public static final OperatingSystemVersionConstantServiceSettings create(
OperatingSystemVersionConstantServiceStubSettings stub) throws IOException {
return new OperatingSystemVersionConstantServiceSettings.Builder(stub.toBuilder()).build();
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return OperatingSystemVersionConstantServiceStubSettings.defaultExecutorProviderBuilder();
}
/** Returns the default service endpoint. */
public static String getDefaultEndpoint() {
return OperatingSystemVersionConstantServiceStubSettings.getDefaultEndpoint();
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return OperatingSystemVersionConstantServiceStubSettings.getDefaultServiceScopes();
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return OperatingSystemVersionConstantServiceStubSettings.defaultCredentialsProviderBuilder();
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
return OperatingSystemVersionConstantServiceStubSettings.defaultGrpcTransportProviderBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return OperatingSystemVersionConstantServiceStubSettings.defaultTransportChannelProvider();
}
@BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return OperatingSystemVersionConstantServiceStubSettings<|fim▁hole|> }
/** Returns a new builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
protected OperatingSystemVersionConstantServiceSettings(Builder settingsBuilder)
throws IOException {
super(settingsBuilder);
}
/** Builder for OperatingSystemVersionConstantServiceSettings. */
public static class Builder
extends ClientSettings.Builder<OperatingSystemVersionConstantServiceSettings, Builder> {
protected Builder() throws IOException {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(OperatingSystemVersionConstantServiceStubSettings.newBuilder(clientContext));
}
protected Builder(OperatingSystemVersionConstantServiceSettings settings) {
super(settings.getStubSettings().toBuilder());
}
protected Builder(OperatingSystemVersionConstantServiceStubSettings.Builder stubSettings) {
super(stubSettings);
}
private static Builder createDefault() {
return new Builder(OperatingSystemVersionConstantServiceStubSettings.newBuilder());
}
public OperatingSystemVersionConstantServiceStubSettings.Builder getStubSettingsBuilder() {
return ((OperatingSystemVersionConstantServiceStubSettings.Builder) getStubSettings());
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(
getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
return this;
}
/** Returns the builder for the settings used for calls to getOperatingSystemVersionConstant. */
public UnaryCallSettings.Builder<
GetOperatingSystemVersionConstantRequest, OperatingSystemVersionConstant>
getOperatingSystemVersionConstantSettings() {
return getStubSettingsBuilder().getOperatingSystemVersionConstantSettings();
}
@Override
public OperatingSystemVersionConstantServiceSettings build() throws IOException {
return new OperatingSystemVersionConstantServiceSettings(this);
}
}
}<|fim▁end|>
|
.defaultApiClientHeaderProviderBuilder();
|
<|file_name|>RecommendationLookup.java<|end_file_name|><|fim▁begin|>package de.codecentric.awesome.recommendation.core;
import java.util.HashMap;
import java.util.Map;
/**
* Created by afitz on 15.03.16.
*/
public class RecommendationLookup {
private static RecommendationLookup ourInstance = new RecommendationLookup();
private String standardProductRecommendation = "P999";
// Map<User, Product>
private final Map<String, String> recommendationMap = new HashMap<String, String>();
public static RecommendationLookup getInstance() {
return ourInstance;
}
private RecommendationLookup() {
recommendationMap.put("P00T", "P001");
recommendationMap.put("P001", "P002");
recommendationMap.put("P002", "P003");<|fim▁hole|> recommendationMap.put("P003", "P003");
}
public Product getRecommendation (Product product) {
return new Product((recommendationMap.containsKey(product.getId()) ? recommendationMap.get(product.getId()) : standardProductRecommendation));
}
}<|fim▁end|>
| |
<|file_name|>converter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright<|fim▁hole|># documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def get_type_name(type_name, sub_type=None):
""" Returns a Java type according to a spec type
"""
if type_name == "enum":
return type_name
elif type_name == "boolean":
return "Boolean"
elif type_name == "integer":
return "Long"
elif type_name == "time":
return "Float"
elif type_name == "object":
return "Object"
elif type_name == "list":
return type_name
elif type_name == "float":
return "Float"
else:
return "String"
def get_idiomatic_name(name):
"""
"""
if name == "private":
return "private_"
elif name == "public":
return "public_"
else:
return name<|fim▁end|>
|
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
|
<|file_name|>CollectionUtils.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2002-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Miscellaneous collection utility methods.
* Mainly for internal use within the framework.
*
* @author Juergen Hoeller
* @author Rob Harrop
* @since 1.1.3
*/
public abstract class CollectionUtils {
/**
* Return <code>true</code> if the supplied Collection is <code>null</code>
* or empty. Otherwise, return <code>false</code>.
* @param collection the Collection to check
* @return whether the given Collection is empty
*/
public static boolean isEmpty(Collection collection) {
return (collection == null || collection.isEmpty());
}
/**
* Return <code>true</code> if the supplied Map is <code>null</code>
* or empty. Otherwise, return <code>false</code>.
* @param map the Map to check
* @return whether the given Map is empty
*/
public static boolean isEmpty(Map map) {
return (map == null || map.isEmpty());
}
/**
* Convert the supplied array into a List. A primitive array gets
* converted into a List of the appropriate wrapper type.
* <p>A <code>null</code> source value will be converted to an
* empty List.
* @param source the (potentially primitive) array
* @return the converted List result
* @see ObjectUtils#toObjectArray(Object)
*/
public static List arrayToList(Object source) {
return Arrays.asList(ObjectUtils.toObjectArray(source));
}
/**
* Merge the given array into the given Collection.
* @param array the array to merge (may be <code>null</code>)
* @param collection the target Collection to merge the array into
*/
public static void mergeArrayIntoCollection(Object array, Collection collection) {
if (collection == null) {
throw new IllegalArgumentException("Collection must not be null");
}
Object[] arr = ObjectUtils.toObjectArray(array);
for (int i = 0; i < arr.length; i++) {
collection.add(arr[i]);
}
}
/**
* Merge the given Properties instance into the given Map,
* copying all properties (key-value pairs) over.
* <p>Uses <code>Properties.propertyNames()</code> to even catch
* default properties linked into the original Properties instance.
* @param props the Properties instance to merge (may be <code>null</code>)
* @param map the target Map to merge the properties into
*/
public static void mergePropertiesIntoMap(Properties props, Map map) {
if (map == null) {
throw new IllegalArgumentException("Map must not be null");
}
if (props != null) {
for (Enumeration en = props.propertyNames(); en.hasMoreElements();) {
String key = (String) en.nextElement();
map.put(key, props.getProperty(key));
}
}
}
/**
* Check whether the given Iterator contains the given element.
* @param iterator the Iterator to check
* @param element the element to look for
* @return <code>true</code> if found, <code>false</code> else
*/
public static boolean contains(Iterator iterator, Object element) {
if (iterator != null) {
while (iterator.hasNext()) {
Object candidate = iterator.next();
if (ObjectUtils.nullSafeEquals(candidate, element)) {
return true;
}
}
}
return false;
}
/**
* Check whether the given Enumeration contains the given element.
* @param enumeration the Enumeration to check
* @param element the element to look for
* @return <code>true</code> if found, <code>false</code> else
*/
public static boolean contains(Enumeration enumeration, Object element) {
if (enumeration != null) {
while (enumeration.hasMoreElements()) {
Object candidate = enumeration.nextElement();
if (ObjectUtils.nullSafeEquals(candidate, element)) {
return true;
}
}
}
return false;
}
/**
* Check whether the given Collection contains the given element instance.
* <p>Enforces the given instance to be present, rather than returning
* <code>true</code> for an equal element as well.
* @param collection the Collection to check
* @param element the element to look for
* @return <code>true</code> if found, <code>false</code> else
*/
public static boolean containsInstance(Collection collection, Object element) {
if (collection != null) {
for (Iterator it = collection.iterator(); it.hasNext();) {
Object candidate = it.next();
if (candidate == element) {
return true;
}
}
}
return false;
}
/**
* Return <code>true</code> if any element in '<code>candidates</code>' is
* contained in '<code>source</code>'; otherwise returns <code>false</code>.
* @param source the source Collection
* @param candidates the candidates to search for
* @return whether any of the candidates has been found
*/
public static boolean containsAny(Collection source, Collection candidates) {
if (isEmpty(source) || isEmpty(candidates)) {
return false;
}
for (Iterator it = candidates.iterator(); it.hasNext();) {
if (source.contains(it.next())) {
return true;
}
}
return false;
}
/**
* Return the first element in '<code>candidates</code>' that is contained in
* '<code>source</code>'. If no element in '<code>candidates</code>' is present in
* '<code>source</code>' returns <code>null</code>. Iteration order is
* {@link Collection} implementation specific.
* @param source the source Collection
* @param candidates the candidates to search for
* @return the first present object, or <code>null</code> if not found
*/
public static Object findFirstMatch(Collection source, Collection candidates) {
if (isEmpty(source) || isEmpty(candidates)) {
return null;
}
for (Iterator it = candidates.iterator(); it.hasNext();) {
Object candidate = it.next();
if (source.contains(candidate)) {
return candidate;
}
}
return null;
}
/**
* Find a single value of the given type in the given Collection.
* @param collection the Collection to search
* @param type the type to look for
* @return a value of the given type found if there is a clear match,
* or <code>null</code> if none or more than one such value found
*/
public static Object findValueOfType(Collection collection, Class type) {
if (isEmpty(collection)) {
return null;
}
Object value = null;
for (Iterator it = collection.iterator(); it.hasNext();) {
Object obj = it.next();
if (type == null || type.isInstance(obj)) {
if (value != null) {
// More than one value found... no clear single value.
return null;
}
value = obj;
}
}
return value;
}
/**
* Find a single value of one of the given types in the given Collection:
* searching the Collection for a value of the first type, then
* searching for a value of the second type, etc.
* @param collection the collection to search
* @param types the types to look for, in prioritized order
* @return a value of one of the given types found if there is a clear match,
* or <code>null</code> if none or more than one such value found
*/
public static Object findValueOfType(Collection collection, Class[] types) {
if (isEmpty(collection) || ObjectUtils.isEmpty(types)) {
return null;
}
<|fim▁hole|> for (int i = 0; i < types.length; i++) {
Object value = findValueOfType(collection, types[i]);
if (value != null) {
return value;
}
}
return null;
}
/**
* Determine whether the given Collection only contains a single unique object.
* @param collection the Collection to check
* @return <code>true</code> if the collection contains a single reference or
* multiple references to the same instance, <code>false</code> else
*/
public static boolean hasUniqueObject(Collection collection) {
if (isEmpty(collection)) {
return false;
}
boolean hasCandidate = false;
Object candidate = null;
for (Iterator it = collection.iterator(); it.hasNext();) {
Object elem = it.next();
if (!hasCandidate) {
hasCandidate = true;
candidate = elem;
}
else if (candidate != elem) {
return false;
}
}
return true;
}
}<|fim▁end|>
| |
<|file_name|>addon-links.stories.js<|end_file_name|><|fim▁begin|>import { linkTo } from '@storybook/addon-links';
export default {
title: 'Addon/Links',
};
export const GoToWelcome = () => ({
template: '<my-button :rounded="true" @click="click" >This buttons links to Welcome</my-button>',
methods: {
click: linkTo('Welcome'),
},<|fim▁hole|>};<|fim▁end|>
|
});
GoToWelcome.story = {
name: 'Go to welcome',
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/* The main program of test panel */
requirejs.config({
paths : {
"dualless" : ".."
},
baseUrl : ".."
});
require(["debug/view/component",
"debug/view/information",
"dualless/directives/winbutton"
],
function app(component,
information,
winbutton){
var module = angular.module('main', []);
module.config(['$routeProvider', function configRouteProvider($routeProvider) {<|fim▁hole|>
$routeProvider.when('/info', information);
//$routeProvider.when('/component', component);
$routeProvider.when('/preparetest', {templateUrl: "view/preparetest.html"});
$routeProvider.otherwise({redirectTo : "/info" })
}]);
module.directive('winbutton',winbutton);
$(document).ready(function() {
angular.bootstrap(document,["main"]);
});
});<|fim▁end|>
| |
<|file_name|>bar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(lang_items, no_std)]
#![no_std]
#![crate_type = "dylib"]
extern crate libc;
#[no_mangle]
pub extern fn bar() {}
#[lang = "stack_exhausted"] fn stack_exhausted() {}
#[lang = "eh_personality"] fn eh_personality() {}
#[lang = "panic_fmt"] fn panic_fmt() -> ! { loop {} }<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Rustdoc's HTML rendering module.
//!
//! This modules contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
//! rendering process is largely driven by the `format!` syntax extension to
//! perform all I/O into files and streams.
//!
//! The rendering process is largely driven by the `Context` and `Cache`
//! structures. The cache is pre-populated by crawling the crate in question,
//! and then it is shared among the various rendering threads. The cache is meant
//! to be a fairly large structure not implementing `Clone` (because it's shared
//! among threads). The context, however, should be a lightweight structure. This
//! is cloned per-thread and contains information about what is currently being
//! rendered.
//!
//! In order to speed up rendering (mostly because of markdown rendering), the
//! rendering process has been parallelized. This parallelization is only
//! exposed through the `crate` method on the context, and then also from the
//! fact that the shared cache is stored in TLS (and must be accessed as such).
//!
//! In addition to rendering the crate itself, this module is also responsible
//! for creating the corresponding search index and source file renderings.
//! These threads are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
crate mod cache;
#[cfg(test)]
mod tests;
mod context;
mod print_item;
mod span_map;
mod templates;
mod write_shared;
crate use self::context::*;
crate use self::span_map::{collect_spans_and_sources, LinkFromSrc};
use std::collections::VecDeque;
use std::default::Default;
use std::fmt;
use std::fs;
use std::iter::Peekable;
use std::path::PathBuf;
use std::str;
use std::string::ToString;
use rustc_ast_pretty::pprust;
use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::DefId;
use rustc_hir::Mutability;
use rustc_middle::middle::stability;
use rustc_middle::ty;
use rustc_middle::ty::TyCtxt;
use rustc_span::{
symbol::{kw, sym, Symbol},
BytePos, FileName, RealFileName,
};
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use crate::clean::{self, ItemId, RenderedLink, SelfTy};
use crate::error::Error;
use crate::formats::cache::Cache;
use crate::formats::item_type::ItemType;
use crate::formats::{AssocItemRender, Impl, RenderMode};
use crate::html::escape::Escape;
use crate::html::format::{
href, print_abi_with_space, print_constness_with_space, print_default_space,
print_generic_bounds, print_where_clause, Buffer, HrefError, PrintWithSpace,
};
use crate::html::highlight;
use crate::html::markdown::{HeadingOffset, Markdown, MarkdownHtml, MarkdownSummaryLine};
use crate::html::sources;
use crate::scrape_examples::{CallData, CallLocation};
use crate::try_none;
/// A pair of name and its optional document.
crate type NameDoc = (String, Option<String>);
crate fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) }
})
}
// Helper structs for rendering items/sidebars and carrying along contextual
// information
/// Struct representing one entry in the JS search index. These are all emitted
/// by hand to a large JS file at the end of cache-creation.
#[derive(Debug)]
crate struct IndexItem {
crate ty: ItemType,
crate name: String,
crate path: String,
crate desc: String,
crate parent: Option<DefId>,
crate parent_idx: Option<usize>,
crate search_type: Option<IndexItemFunctionType>,
crate aliases: Box<[Symbol]>,
}
/// A type used for the search index.
#[derive(Debug)]
crate struct RenderType {
name: Option<String>,
generics: Option<Vec<TypeWithKind>>,
}
/// Full type of functions/methods in the search index.
#[derive(Debug)]
crate struct IndexItemFunctionType {
inputs: Vec<TypeWithKind>,
output: Vec<TypeWithKind>,
}
impl Serialize for IndexItemFunctionType {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// If we couldn't figure out a type, just write `null`.
let has_missing = self.inputs.iter().chain(self.output.iter()).any(|i| i.ty.name.is_none());
if has_missing {
serializer.serialize_none()
} else {
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&self.inputs)?;
match self.output.as_slice() {
[] => {}
[one] => seq.serialize_element(one)?,
all => seq.serialize_element(all)?,
}
seq.end()
}
}
}
#[derive(Debug)]
crate struct TypeWithKind {
ty: RenderType,
kind: ItemType,
}
impl From<(RenderType, ItemType)> for TypeWithKind {
fn from(x: (RenderType, ItemType)) -> TypeWithKind {
TypeWithKind { ty: x.0, kind: x.1 }
}
}
impl Serialize for TypeWithKind {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(None)?;
seq.serialize_element(&self.ty.name)?;
seq.serialize_element(&self.kind)?;
if let Some(generics) = &self.ty.generics {
seq.serialize_element(generics)?;
}
seq.end()
}
}
#[derive(Debug, Clone)]
crate struct StylePath {
/// The path to the theme
crate path: PathBuf,
}
impl StylePath {
pub fn basename(&self) -> Result<String, Error> {
Ok(try_none!(try_none!(self.path.file_stem(), &self.path).to_str(), &self.path).to_string())
}
}
fn write_srclink(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer) {
if let Some(l) = cx.src_href(item) {
write!(buf, "<a class=\"srclink\" href=\"{}\" title=\"goto source code\">[src]</a>", l)
}
}
#[derive(Debug, Eq, PartialEq, Hash)]
struct ItemEntry {
url: String,
name: String,
}
impl ItemEntry {
fn new(mut url: String, name: String) -> ItemEntry {
while url.starts_with('/') {
url.remove(0);
}
ItemEntry { url, name }
}
}
impl ItemEntry {
crate fn print(&self) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
write!(f, "<a href=\"{}\">{}</a>", self.url, Escape(&self.name))
})
}
}
impl PartialOrd for ItemEntry {
fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for ItemEntry {
fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering {
self.name.cmp(&other.name)
}
}
#[derive(Debug)]
struct AllTypes {
structs: FxHashSet<ItemEntry>,
enums: FxHashSet<ItemEntry>,
unions: FxHashSet<ItemEntry>,
primitives: FxHashSet<ItemEntry>,
traits: FxHashSet<ItemEntry>,
macros: FxHashSet<ItemEntry>,
functions: FxHashSet<ItemEntry>,
typedefs: FxHashSet<ItemEntry>,
opaque_tys: FxHashSet<ItemEntry>,
statics: FxHashSet<ItemEntry>,
constants: FxHashSet<ItemEntry>,
attributes: FxHashSet<ItemEntry>,
derives: FxHashSet<ItemEntry>,
trait_aliases: FxHashSet<ItemEntry>,
}
impl AllTypes {
fn new() -> AllTypes {
let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default());
AllTypes {
structs: new_set(100),
enums: new_set(100),
unions: new_set(100),
primitives: new_set(26),
traits: new_set(100),
macros: new_set(100),
functions: new_set(100),
typedefs: new_set(100),
opaque_tys: new_set(100),
statics: new_set(100),
constants: new_set(100),
attributes: new_set(100),
derives: new_set(100),
trait_aliases: new_set(100),
}
}
fn append(&mut self, item_name: String, item_type: &ItemType) {
let mut url: Vec<_> = item_name.split("::").skip(1).collect();
if let Some(name) = url.pop() {
let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name);
url.push(name);
let name = url.join("::");
match *item_type {
ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)),
ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)),
ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)),
ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)),
ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)),
ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)),
ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)),
ItemType::Typedef => self.typedefs.insert(ItemEntry::new(new_url, name)),
ItemType::OpaqueTy => self.opaque_tys.insert(ItemEntry::new(new_url, name)),
ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)),
ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
_ => true,
};
}
}
}
impl AllTypes {
fn print(self, f: &mut Buffer) {
fn print_entries(f: &mut Buffer, e: &FxHashSet<ItemEntry>, title: &str, class: &str) {
if !e.is_empty() {
let mut e: Vec<&ItemEntry> = e.iter().collect();
e.sort();
write!(
f,
"<h3 id=\"{}\">{}</h3><ul class=\"{} docblock\">",
title.replace(' ', "-"), // IDs cannot contain whitespaces.
title,
class
);
for s in e.iter() {
write!(f, "<li>{}</li>", s.print());
}
f.write_str("</ul>");
}
}
f.write_str(
"<h1 class=\"fqn\">\
<span class=\"in-band\">List of all items</span>\
<span class=\"out-of-band\">\
<span id=\"render-detail\">\
<a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \
title=\"collapse all docs\">\
[<span class=\"inner\">−</span>]\
</a>\
</span>
</span>
</h1>",
);
// Note: print_entries does not escape the title, because we know the current set of titles
// doesn't require escaping.
print_entries(f, &self.structs, "Structs", "structs");
print_entries(f, &self.enums, "Enums", "enums");
print_entries(f, &self.unions, "Unions", "unions");
print_entries(f, &self.primitives, "Primitives", "primitives");
print_entries(f, &self.traits, "Traits", "traits");
print_entries(f, &self.macros, "Macros", "macros");
print_entries(f, &self.attributes, "Attribute Macros", "attributes");
print_entries(f, &self.derives, "Derive Macros", "derives");
print_entries(f, &self.functions, "Functions", "functions");
print_entries(f, &self.typedefs, "Typedefs", "typedefs");
print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases");
print_entries(f, &self.opaque_tys, "Opaque Types", "opaque-types");
print_entries(f, &self.statics, "Statics", "statics");
print_entries(f, &self.constants, "Constants", "constants")
}
}
#[derive(Debug)]
enum Setting {
Section {
description: &'static str,
sub_settings: Vec<Setting>,
},
Toggle {
js_data_name: &'static str,
description: &'static str,
default_value: bool,
},
Select {
js_data_name: &'static str,
description: &'static str,
default_value: &'static str,
options: Vec<String>,
},
}
impl Setting {
fn display(&self, root_path: &str, suffix: &str) -> String {
match *self {
Setting::Section { description, ref sub_settings } => format!(
"<div class=\"setting-line\">\
<div class=\"title\">{}</div>\
<div class=\"sub-settings\">{}</div>
</div>",
description,
sub_settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>()
),
Setting::Toggle { js_data_name, description, default_value } => format!(
"<div class=\"setting-line\">\
<label class=\"toggle\">\
<input type=\"checkbox\" id=\"{}\" {}>\
<span class=\"slider\"></span>\
</label>\
<div>{}</div>\
</div>",
js_data_name,
if default_value { " checked" } else { "" },
description,
),
Setting::Select { js_data_name, description, default_value, ref options } => format!(
"<div class=\"setting-line\">\
<div>{}</div>\
<label class=\"select-wrapper\">\
<select id=\"{}\" autocomplete=\"off\">{}</select>\
<img src=\"{}down-arrow{}.svg\" alt=\"Select item\">\
</label>\
</div>",
description,
js_data_name,
options
.iter()
.map(|opt| format!(
"<option value=\"{name}\" {}>{name}</option>",
if opt == default_value { "selected" } else { "" },
name = opt,
))
.collect::<String>(),
root_path,
suffix,
),
}
}
}
impl From<(&'static str, &'static str, bool)> for Setting {
fn from(values: (&'static str, &'static str, bool)) -> Setting {
Setting::Toggle { js_data_name: values.0, description: values.1, default_value: values.2 }
}
}
impl<T: Into<Setting>> From<(&'static str, Vec<T>)> for Setting {
fn from(values: (&'static str, Vec<T>)) -> Setting {
Setting::Section {
description: values.0,
sub_settings: values.1.into_iter().map(|v| v.into()).collect::<Vec<_>>(),
}
}
}
fn settings(root_path: &str, suffix: &str, theme_names: Vec<String>) -> Result<String, Error> {
// (id, explanation, default value)
let settings: &[Setting] = &[
(
"Theme preferences",
vec![
Setting::from(("use-system-theme", "Use system theme", true)),
Setting::Select {
js_data_name: "preferred-dark-theme",
description: "Preferred dark theme",
default_value: "dark",
options: theme_names.clone(),
},
Setting::Select {
js_data_name: "preferred-light-theme",
description: "Preferred light theme",
default_value: "light",
options: theme_names,
},
],
)
.into(),
("auto-hide-large-items", "Auto-hide item contents for large items.", true).into(),
("auto-hide-method-docs", "Auto-hide item methods' documentation", false).into(),
("auto-hide-trait-implementations", "Auto-hide trait implementation documentation", false)
.into(),
("go-to-only-result", "Directly go to item in search if there is only one result", false)
.into(),
("line-numbers", "Show line numbers on code examples", false).into(),
("disable-shortcuts", "Disable keyboard shortcuts", false).into(),
];
Ok(format!(
"<h1 class=\"fqn\">\
<span class=\"in-band\">Rustdoc settings</span>\
</h1>\
<div class=\"settings\">{}</div>\
<link rel=\"stylesheet\" href=\"{root_path}settings{suffix}.css\">\
<script src=\"{root_path}settings{suffix}.js\"></script>",
settings.iter().map(|s| s.display(root_path, suffix)).collect::<String>(),
root_path = root_path,
suffix = suffix
))
}
fn document(
w: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
parent: Option<&clean::Item>,
heading_offset: HeadingOffset,
) {
if let Some(ref name) = item.name {
info!("Documenting {}", name);
}
document_item_info(w, cx, item, parent);
if parent.is_none() {
document_full_collapsible(w, item, cx, heading_offset);
} else {
document_full(w, item, cx, heading_offset);
}
}
/// Render md_text as markdown.
fn render_markdown(
w: &mut Buffer,
cx: &Context<'_>,
md_text: &str,
links: Vec<RenderedLink>,
heading_offset: HeadingOffset,
) {
let mut ids = cx.id_map.borrow_mut();
write!(
w,
"<div class=\"docblock\">{}</div>",
Markdown {
content: md_text,
links: &links,
ids: &mut ids,
error_codes: cx.shared.codes,
edition: cx.shared.edition(),
playground: &cx.shared.playground,
heading_offset,
}
.into_string()
)
}
/// Writes a documentation block containing only the first paragraph of the documentation. If the
/// docs are longer, a "Read more" link is appended to the end.
fn document_short(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
link: AssocItemLink<'_>,
parent: &clean::Item,
show_def_docs: bool,
) {
document_item_info(w, cx, item, Some(parent));
if !show_def_docs {
return;
}
if let Some(s) = item.doc_value() {
let mut summary_html = MarkdownSummaryLine(&s, &item.links(cx)).into_string();
if s.contains('\n') {
let link = format!(r#" <a href="{}">Read more</a>"#, naive_assoc_href(item, link, cx));
if let Some(idx) = summary_html.rfind("</p>") {
summary_html.insert_str(idx, &link);
} else {
summary_html.push_str(&link);
}
}
write!(w, "<div class='docblock'>{}</div>", summary_html,);
}
}
fn document_full_collapsible(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
heading_offset: HeadingOffset,
) {
document_full_inner(w, item, cx, true, heading_offset);
}
fn document_full(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
heading_offset: HeadingOffset,
) {
document_full_inner(w, item, cx, false, heading_offset);
}
fn document_full_inner(
w: &mut Buffer,
item: &clean::Item,
cx: &Context<'_>,
is_collapsible: bool,
heading_offset: HeadingOffset,
) {
if let Some(s) = cx.shared.maybe_collapsed_doc_value(item) {
debug!("Doc block: =====\n{}\n=====", s);
if is_collapsible {
w.write_str(
"<details class=\"rustdoc-toggle top-doc\" open>\
<summary class=\"hideme\">\
<span>Expand description</span>\
</summary>",
);
render_markdown(w, cx, &s, item.links(cx), heading_offset);
w.write_str("</details>");
} else {
render_markdown(w, cx, &s, item.links(cx), heading_offset);
}
}
let kind = match &*item.kind {
clean::ItemKind::StrippedItem(box kind) | kind => kind,
};
if let clean::ItemKind::FunctionItem(..) | clean::ItemKind::MethodItem(..) = kind {
render_call_locations(w, cx, item);
}
}
/// Add extra information about an item such as:
///
/// * Stability
/// * Deprecated
/// * Required features (through the `doc_cfg` feature)
fn document_item_info(
w: &mut Buffer,
cx: &Context<'_>,
item: &clean::Item,
parent: Option<&clean::Item>,
) {
let item_infos = short_item_info(item, cx, parent);
if !item_infos.is_empty() {
w.write_str("<div class=\"item-info\">");
for info in item_infos {
w.write_str(&info);
}
w.write_str("</div>");
}
}
fn portability(item: &clean::Item, parent: Option<&clean::Item>) -> Option<String> {
let cfg = match (&item.cfg, parent.and_then(|p| p.cfg.as_ref())) {
(Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
(cfg, _) => cfg.as_deref().cloned(),
};
debug!("Portability {:?} - {:?} = {:?}", item.cfg, parent.and_then(|p| p.cfg.as_ref()), cfg);
Some(format!("<div class=\"stab portability\">{}</div>", cfg?.render_long_html()))
}
/// Render the stability, deprecation and portability information that is displayed at the top of
/// the item's documentation.
///
/// Returns one rendered `<div class="stab …">` HTML snippet per applicable
/// notice (deprecation, unstable feature, portability), in that order.
fn short_item_info(
    item: &clean::Item,
    cx: &Context<'_>,
    parent: Option<&clean::Item>,
) -> Vec<String> {
    let mut extra_info = vec![];
    let error_codes = cx.shared.codes;
    if let Some(depr @ Deprecation { note, since, is_since_rustc_version: _, suggestion: _ }) =
        item.deprecation(cx.tcx())
    {
        // We display deprecation messages for #[deprecated] and #[rustc_deprecated]
        // but only display the future-deprecation messages for #[rustc_deprecated].
        let mut message = if let Some(since) = since {
            let since = &since.as_str();
            if !stability::deprecation_in_effect(&depr) {
                if *since == "TBD" {
                    String::from("Deprecating in a future Rust version")
                } else {
                    format!("Deprecating in {}", Escape(since))
                }
            } else {
                format!("Deprecated since {}", Escape(since))
            }
        } else {
            String::from("Deprecated")
        };
        if let Some(note) = note {
            let note = note.as_str();
            let mut ids = cx.id_map.borrow_mut();
            // Fixed: this argument had been corrupted to `¬e` by an
            // HTML-entity decoding pass mangling `&not` inside `&note`.
            let html = MarkdownHtml(
                &note,
                &mut ids,
                error_codes,
                cx.shared.edition(),
                &cx.shared.playground,
            );
            message.push_str(&format!(": {}", html.into_string()));
        }
        extra_info.push(format!(
            "<div class=\"stab deprecated\"><span class=\"emoji\">👎</span> {}</div>",
            message,
        ));
    }
    // Render unstable items. But don't render "rustc_private" crates (internal compiler crates).
    // Those crates are permanently unstable so it makes no sense to render "unstable" everywhere.
    if let Some((StabilityLevel::Unstable { reason: _, issue, .. }, feature)) = item
        .stability(cx.tcx())
        .as_ref()
        .filter(|stab| stab.feature != sym::rustc_private)
        .map(|stab| (stab.level, stab.feature))
    {
        let mut message =
            "<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned();
        let mut feature = format!("<code>{}</code>", Escape(&feature.as_str()));
        // Link the feature to its tracking issue when the base URL is configured.
        if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
            feature.push_str(&format!(
                " <a href=\"{url}{issue}\">#{issue}</a>",
                url = url,
                issue = issue
            ));
        }
        message.push_str(&format!(" ({})", feature));
        extra_info.push(format!("<div class=\"stab unstable\">{}</div>", message));
    }
    if let Some(portability) = portability(item, parent) {
        extra_info.push(portability);
    }
    extra_info
}
/// Render the list of items inside one of the sections "Trait Implementations",
/// "Auto Trait Implementations," "Blanket Trait Implementations" (on struct/enum pages).
///
/// Each impl is rendered into its own buffer first; the resulting HTML strings
/// are sorted so the section's order is deterministic across runs.
fn render_impls(cx: &Context<'_>, w: &mut Buffer, impls: &[&&Impl], containing_item: &clean::Item) {
    let tcx = cx.tcx();
    let mut rendered_impls = impls
        .iter()
        .map(|i| {
            // Callers pass trait impls only (see the `traits` partitions in
            // `render_assoc_items_inner`), so `trait_did()` is always `Some`.
            let did = i.trait_did().unwrap();
            let provided_trait_methods = i.inner_impl().provided_trait_methods(tcx);
            let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_trait_methods);
            // Preserve the caller buffer's output mode (HTML vs. plain).
            let mut buffer = if w.is_for_html() { Buffer::html() } else { Buffer::new() };
            render_impl(
                &mut buffer,
                cx,
                i,
                containing_item,
                assoc_link,
                RenderMode::Normal,
                None,
                &[],
                ImplRenderingParameters {
                    show_def_docs: true,
                    is_on_foreign_type: false,
                    show_default_items: true,
                    show_non_assoc_items: true,
                    toggle_open_by_default: true,
                },
            );
            buffer.into_inner()
        })
        .collect::<Vec<_>>();
    rendered_impls.sort();
    w.write_str(&rendered_impls.join(""));
}
fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>, cx: &Context<'_>) -> String {
use crate::formats::item_type::ItemType::*;
let name = it.name.as_ref().unwrap();
let ty = match it.type_() {
Typedef | AssocType => AssocType,
s => s,
};
let anchor = format!("#{}.{}", ty, name);
match link {
AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
AssocItemLink::Anchor(None) => anchor,
AssocItemLink::GotoSource(did, _) => {
href(did.expect_def_id(), cx).map(|p| format!("{}{}", p.0, anchor)).unwrap_or(anchor)
}
}
}
/// Writes the signature of an associated constant
/// (`{vis}const NAME: Type`) with the name linked via `link`.
/// `extra` is prepended verbatim (callers pass indentation for trait pages);
/// the default value, if any, is currently not rendered.
fn assoc_const(
    w: &mut Buffer,
    it: &clean::Item,
    ty: &clean::Type,
    _default: Option<&String>,
    link: AssocItemLink<'_>,
    extra: &str,
    cx: &Context<'_>,
) {
    let href = naive_assoc_href(it, link, cx);
    let name = it.name.as_ref().unwrap();
    write!(
        w,
        "{}{}const <a href=\"{}\" class=\"constant\">{}</a>: {}",
        extra,
        it.visibility.print_with_space(it.def_id, cx),
        href,
        name,
        ty.print(cx)
    );
}
/// Writes the signature of an associated type
/// (`type NAME[: Bounds][ = Default]`) with the name linked via `link`.
/// `extra` is prepended verbatim (callers pass indentation for trait pages).
fn assoc_type(
    w: &mut Buffer,
    it: &clean::Item,
    bounds: &[clean::GenericBound],
    default: Option<&clean::Type>,
    link: AssocItemLink<'_>,
    extra: &str,
    cx: &Context<'_>,
) {
    let href = naive_assoc_href(it, link, cx);
    let name = it.name.as_ref().unwrap();
    write!(w, "{}type <a href=\"{}\" class=\"type\">{}</a>", extra, href, name);
    if !bounds.is_empty() {
        write!(w, ": {}", print_generic_bounds(bounds, cx));
    }
    if let Some(default_ty) = default {
        write!(w, " = {}", default_ty.print(cx));
    }
}
/// Writes a `<span class="since">` badge describing the Rust version an item
/// was stabilized in and, where applicable, its const-stability.
///
/// Emits nothing when there is no version, or when the version equals the
/// containing item's (`containing_ver`/`containing_const_ver` guards), so the
/// same badge is not repeated on every child item.
fn render_stability_since_raw(
    w: &mut Buffer,
    ver: Option<&str>,
    const_stability: Option<&ConstStability>,
    containing_ver: Option<&str>,
    containing_const_ver: Option<&str>,
) {
    // An empty version string is treated the same as no version.
    let ver = ver.filter(|inner| !inner.is_empty());
    match (ver, const_stability) {
        // stable and const stable
        (Some(v), Some(ConstStability { level: StabilityLevel::Stable { since }, .. }))
            if Some(since.as_str()).as_deref() != containing_const_ver =>
        {
            write!(
                w,
                "<span class=\"since\" title=\"Stable since Rust version {0}, const since {1}\">{0} (const: {1})</span>",
                v, since
            );
        }
        // stable and const unstable
        (
            Some(v),
            Some(ConstStability { level: StabilityLevel::Unstable { issue, .. }, feature, .. }),
        ) => {
            write!(
                w,
                "<span class=\"since\" title=\"Stable since Rust version {0}, const unstable\">{0} (const: ",
                v
            );
            // Link "unstable" to the tracking issue when one is recorded.
            if let Some(n) = issue {
                write!(
                    w,
                    "<a href=\"https://github.com/rust-lang/rust/issues/{}\" title=\"Tracking issue for {}\">unstable</a>",
                    n, feature
                );
            } else {
                write!(w, "unstable");
            }
            write!(w, ")</span>");
        }
        // stable
        (Some(v), _) if ver != containing_ver => {
            write!(
                w,
                "<span class=\"since\" title=\"Stable since Rust version {0}\">{0}</span>",
                v
            );
        }
        _ => {}
    }
}
fn render_assoc_item(
w: &mut Buffer,
item: &clean::Item,
link: AssocItemLink<'_>,
parent: ItemType,
cx: &Context<'_>,
) {
fn method(
w: &mut Buffer,
meth: &clean::Item,
header: hir::FnHeader,
g: &clean::Generics,
d: &clean::FnDecl,
link: AssocItemLink<'_>,
parent: ItemType,
cx: &Context<'_>,
) {
let name = meth.name.as_ref().unwrap();
let href = match link {
AssocItemLink::Anchor(Some(ref id)) => Some(format!("#{}", id)),
AssocItemLink::Anchor(None) => Some(format!("#{}.{}", meth.type_(), name)),
AssocItemLink::GotoSource(did, provided_methods) => {
// We're creating a link from an impl-item to the corresponding
// trait-item and need to map the anchored type accordingly.
let ty = if provided_methods.contains(name) {
ItemType::Method
} else {
ItemType::TyMethod
};
match (href(did.expect_def_id(), cx), ty) {
(Ok(p), ty) => Some(format!("{}#{}.{}", p.0, ty, name)),
(Err(HrefError::DocumentationNotBuilt), ItemType::TyMethod) => None,
(Err(_), ty) => Some(format!("#{}.{}", ty, name)),
}
}
};
let vis = meth.visibility.print_with_space(meth.def_id, cx).to_string();
let constness =
print_constness_with_space(&header.constness, meth.const_stability(cx.tcx()));
let asyncness = header.asyncness.print_with_space();
let unsafety = header.unsafety.print_with_space();
let defaultness = print_default_space(meth.is_default());
let abi = print_abi_with_space(header.abi).to_string();
// NOTE: `{:#}` does not print HTML formatting, `{}` does. So `g.print` can't be reused between the length calculation and `write!`.
let generics_len = format!("{:#}", g.print(cx)).len();
let mut header_len = "fn ".len()
+ vis.len()
+ constness.len()
+ asyncness.len()
+ unsafety.len()
+ defaultness.len()
+ abi.len()
+ name.as_str().len()
+ generics_len;
let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
header_len += 4;
let indent_str = " ";
render_attributes_in_pre(w, meth, indent_str);
(4, indent_str, false)
} else {
render_attributes_in_code(w, meth);
(0, "", true)
};
w.reserve(header_len + "<a href=\"\" class=\"fnname\">{".len() + "</a>".len());
write!(
w,
"{indent}{vis}{constness}{asyncness}{unsafety}{defaultness}{abi}fn <a {href} class=\"fnname\">{name}</a>\
{generics}{decl}{notable_traits}{where_clause}",
indent = indent_str,
vis = vis,
constness = constness,
asyncness = asyncness,
unsafety = unsafety,
defaultness = defaultness,
abi = abi,
// links without a href are valid - https://www.w3schools.com/tags/att_a_href.asp
href = href.map(|href| format!("href=\"{}\"", href)).unwrap_or_else(|| "".to_string()),
name = name,
generics = g.print(cx),
decl = d.full_print(header_len, indent, header.asyncness, cx),
notable_traits = notable_traits_decl(d, cx),
where_clause = print_where_clause(g, cx, indent, end_newline),
)<|fim▁hole|> match *item.kind {
clean::StrippedItem(..) => {}
clean::TyMethodItem(ref m) => {
method(w, item, m.header, &m.generics, &m.decl, link, parent, cx)
}
clean::MethodItem(ref m, _) => {
method(w, item, m.header, &m.generics, &m.decl, link, parent, cx)
}
clean::AssocConstItem(ref ty, ref default) => assoc_const(
w,
item,
ty,
default.as_ref(),
link,
if parent == ItemType::Trait { " " } else { "" },
cx,
),
clean::AssocTypeItem(ref bounds, ref default) => assoc_type(
w,
item,
bounds,
default.as_ref(),
link,
if parent == ItemType::Trait { " " } else { "" },
cx,
),
_ => panic!("render_assoc_item called on non-associated-item"),
}
}
/// The only attributes rendered as part of an item's signature; everything
/// else in `other_attrs` is filtered out by `attributes`.
const ALLOWED_ATTRIBUTES: &[Symbol] =
    &[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive];
/// Pretty-prints each of `it`'s attributes that is in `ALLOWED_ATTRIBUTES`,
/// flattening the printed form onto a single line.
fn attributes(it: &clean::Item) -> Vec<String> {
    let mut rendered = Vec::new();
    for attr in it.attrs.other_attrs.iter() {
        if !ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
            continue;
        }
        rendered.push(pprust::attribute_to_string(attr).replace("\n", "").replace(" ", " "));
    }
    rendered
}
/// When an attribute is rendered inside a `<pre>` tag, it is formatted using
/// a whitespace prefix and newline.
fn render_attributes_in_pre(w: &mut Buffer, it: &clean::Item, prefix: &str) {
    for attribute in attributes(it) {
        writeln!(w, "{}{}", prefix, attribute);
    }
}
/// When an attribute is rendered inside a `<code>` tag, it is formatted using
/// a div to produce a newline after it.
fn render_attributes_in_code(w: &mut Buffer, it: &clean::Item) {
    for attribute in attributes(it) {
        write!(w, "<div class=\"code-attribute\">{}</div>", attribute);
    }
}
/// How the name of an associated item is linked when rendered.
#[derive(Copy, Clone)]
enum AssocItemLink<'a> {
    /// In-page anchor: `#id` when `Some`, or the item's default
    /// `#kind.name` anchor when `None` (see `naive_assoc_href`).
    Anchor(Option<&'a str>),
    /// Link back to the source trait item; the set holds the trait's provided
    /// method names, used to pick `Method` vs `TyMethod` anchors.
    GotoSource(ItemId, &'a FxHashSet<Symbol>),
}
impl<'a> AssocItemLink<'a> {
    /// Returns a copy of `self` with an `Anchor` variant pinned to `id`;
    /// `GotoSource` links are returned unchanged.
    fn anchor(&self, id: &'a str) -> Self {
        if let AssocItemLink::Anchor(_) = *self {
            AssocItemLink::Anchor(Some(id))
        } else {
            *self
        }
    }
}
/// Entry point for rendering an item's associated-items sections.
///
/// Seeds the deref-cycle guard set with `it` itself before delegating to
/// `render_assoc_items_inner`, so `impl Deref<Target = Self>`-style chains
/// cannot recurse back into this item.
fn render_assoc_items(
    w: &mut Buffer,
    cx: &Context<'_>,
    containing_item: &clean::Item,
    it: DefId,
    what: AssocItemRender<'_>,
) {
    let mut seen_deref_targets = FxHashSet::default();
    seen_deref_targets.insert(it);
    render_assoc_items_inner(w, cx, containing_item, it, what, &mut seen_deref_targets)
}
/// Renders the "Implementations", deref-method, "Trait Implementations",
/// "Auto Trait Implementations" and "Blanket Implementations" sections for
/// `it` into `w`. `derefs` tracks already-visited `Deref` targets to break
/// cycles (see `render_deref_methods`).
///
/// Fix: the "Methods from {trait_}<Target = {type_}>" heading string had its
/// `&lt;`/`&gt;` entities decoded into raw `<`/`>` by a corrupting pass; raw
/// angle brackets would be parsed as markup in the emitted HTML, so the
/// entities are restored.
fn render_assoc_items_inner(
    w: &mut Buffer,
    cx: &Context<'_>,
    containing_item: &clean::Item,
    it: DefId,
    what: AssocItemRender<'_>,
    derefs: &mut FxHashSet<DefId>,
) {
    info!("Documenting associated items of {:?}", containing_item.name);
    let cache = cx.cache();
    let v = match cache.impls.get(&it) {
        Some(v) => v,
        None => return,
    };
    // Inherent impls are rendered first, trait impls afterwards.
    let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none());
    if !non_trait.is_empty() {
        let mut tmp_buf = Buffer::empty_from(w);
        let render_mode = match what {
            AssocItemRender::All => {
                tmp_buf.write_str(
                    "<h2 id=\"implementations\" class=\"small-section-header\">\
                         Implementations<a href=\"#implementations\" class=\"anchor\"></a>\
                     </h2>",
                );
                RenderMode::Normal
            }
            AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
                let id =
                    cx.derive_id(small_url_encode(format!("deref-methods-{:#}", type_.print(cx))));
                if let Some(def_id) = type_.def_id(cx.cache()) {
                    cx.deref_id_map.borrow_mut().insert(def_id, id.clone());
                }
                write!(
                    tmp_buf,
                    "<h2 id=\"{id}\" class=\"small-section-header\">\
                         <span>Methods from {trait_}&lt;Target = {type_}&gt;</span>\
                         <a href=\"#{id}\" class=\"anchor\"></a>\
                     </h2>",
                    id = id,
                    trait_ = trait_.print(cx),
                    type_ = type_.print(cx),
                );
                RenderMode::ForDeref { mut_: deref_mut_ }
            }
        };
        let mut impls_buf = Buffer::empty_from(w);
        for i in &non_trait {
            render_impl(
                &mut impls_buf,
                cx,
                i,
                containing_item,
                AssocItemLink::Anchor(None),
                render_mode,
                None,
                &[],
                ImplRenderingParameters {
                    show_def_docs: true,
                    is_on_foreign_type: false,
                    show_default_items: true,
                    show_non_assoc_items: true,
                    toggle_open_by_default: true,
                },
            );
        }
        // Only emit the section heading when at least one impl rendered output.
        if !impls_buf.is_empty() {
            w.push_buffer(tmp_buf);
            w.push_buffer(impls_buf);
        }
    }
    if !traits.is_empty() {
        let deref_impl =
            traits.iter().find(|t| t.trait_did() == cx.tcx().lang_items().deref_trait());
        if let Some(impl_) = deref_impl {
            let has_deref_mut =
                traits.iter().any(|t| t.trait_did() == cx.tcx().lang_items().deref_mut_trait());
            render_deref_methods(w, cx, impl_, containing_item, has_deref_mut, derefs);
        }
        // If we were already one level into rendering deref methods, we don't want to render
        // anything after recursing into any further deref methods above.
        if let AssocItemRender::DerefFor { .. } = what {
            return;
        }
        let (synthetic, concrete): (Vec<&&Impl>, Vec<&&Impl>) =
            traits.iter().partition(|t| t.inner_impl().kind.is_auto());
        let (blanket_impl, concrete): (Vec<&&Impl>, _) =
            concrete.into_iter().partition(|t| t.inner_impl().kind.is_blanket());
        let mut impls = Buffer::empty_from(w);
        render_impls(cx, &mut impls, &concrete, containing_item);
        let impls = impls.into_inner();
        if !impls.is_empty() {
            write!(
                w,
                "<h2 id=\"trait-implementations\" class=\"small-section-header\">\
                     Trait Implementations<a href=\"#trait-implementations\" class=\"anchor\"></a>\
                 </h2>\
                 <div id=\"trait-implementations-list\">{}</div>",
                impls
            );
        }
        if !synthetic.is_empty() {
            w.write_str(
                "<h2 id=\"synthetic-implementations\" class=\"small-section-header\">\
                     Auto Trait Implementations\
                     <a href=\"#synthetic-implementations\" class=\"anchor\"></a>\
                 </h2>\
                 <div id=\"synthetic-implementations-list\">",
            );
            render_impls(cx, w, &synthetic, containing_item);
            w.write_str("</div>");
        }
        if !blanket_impl.is_empty() {
            w.write_str(
                "<h2 id=\"blanket-implementations\" class=\"small-section-header\">\
                     Blanket Implementations\
                     <a href=\"#blanket-implementations\" class=\"anchor\"></a>\
                 </h2>\
                 <div id=\"blanket-implementations-list\">",
            );
            render_impls(cx, w, &blanket_impl, containing_item);
            w.write_str("</div>");
        }
    }
}
/// Renders the "Methods from Deref<Target = …>" listing for `impl_`, a `Deref`
/// impl, by re-invoking `render_assoc_items_inner` on the deref target.
fn render_deref_methods(
    w: &mut Buffer,
    cx: &Context<'_>,
    impl_: &Impl,
    container_item: &clean::Item,
    deref_mut: bool,
    derefs: &mut FxHashSet<DefId>,
) {
    let cache = cx.cache();
    // `impl_` is a `Deref` impl (found via `deref_trait()` by the caller), so
    // `trait_` is always `Some` here.
    let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
    // Locate the `type Target = …` binding. `target` prefers the resolved
    // `item_type` when present; `real_target` is the written type, used for
    // the section heading.
    let (target, real_target) = impl_
        .inner_impl()
        .items
        .iter()
        .find_map(|item| match *item.kind {
            clean::TypedefItem(ref t, true) => Some(match *t {
                clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
                _ => (&t.type_, &t.type_),
            }),
            _ => None,
        })
        .expect("Expected associated type binding");
    debug!("Render deref methods for {:#?}, target {:#?}", impl_.inner_impl().for_, target);
    let what =
        AssocItemRender::DerefFor { trait_: deref_type, type_: real_target, deref_mut_: deref_mut };
    if let Some(did) = target.def_id(cache) {
        if let Some(type_did) = impl_.inner_impl().for_.def_id(cache) {
            // `impl Deref<Target = S> for S`
            if did == type_did || !derefs.insert(did) {
                // Avoid infinite cycles
                return;
            }
        }
        render_assoc_items_inner(w, cx, container_item, did, what, derefs);
    } else {
        // Primitive deref targets are resolved via their registered location.
        if let Some(prim) = target.primitive_type() {
            if let Some(&did) = cache.primitive_locations.get(&prim) {
                render_assoc_items_inner(w, cx, container_item, did, what, derefs);
            }
        }
    }
}
/// Decides whether a method should appear in a deref-methods listing based on
/// its receiver: `self`-by-value and `self: Box<Self>` methods are hidden, and
/// `&mut self` methods are shown only when `deref_mut_` is true. Non-method
/// items and methods without a self type are never shown.
fn should_render_item(item: &clean::Item, deref_mut_: bool, tcx: TyCtxt<'_>) -> bool {
    let self_ty = match *item.kind {
        clean::MethodItem(ref method, _) => method.decl.self_type(),
        clean::TyMethodItem(ref method) => method.decl.self_type(),
        _ => None,
    };
    match self_ty {
        // `self` by value never carries over through a deref.
        Some(SelfTy::SelfValue) => false,
        // `&self` always shows; `&mut self` only when `DerefMut` is implemented.
        Some(SelfTy::SelfBorrowed(_, mutability))
        | Some(SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. })) => {
            deref_mut_ || mutability != Mutability::Mut
        }
        // `self: Box<Self>` is hidden; any other explicit path receiver shows.
        Some(SelfTy::SelfExplicit(clean::Type::Path { path })) => {
            Some(path.def_id()) != tcx.lang_items().owned_box()
        }
        Some(_) => true,
        None => false,
    }
}
/// Builds the "Notable traits" tooltip HTML for a function's return type, or
/// an empty string when no implemented trait is flagged notable
/// (`Trait::is_notable`).
fn notable_traits_decl(decl: &clean::FnDecl, cx: &Context<'_>) -> String {
    let mut out = Buffer::html();
    if let Some(did) = decl.output.as_return().and_then(|t| t.def_id(cx.cache())) {
        if let Some(impls) = cx.cache().impls.get(&did) {
            for i in impls {
                let impl_ = i.inner_impl();
                if let Some(trait_) = &impl_.trait_ {
                    let trait_did = trait_.def_id();
                    if cx.cache().traits.get(&trait_did).map_or(false, |t| t.is_notable) {
                        // Open the tooltip container on the first notable trait.
                        if out.is_empty() {
                            write!(
                                &mut out,
                                "<div class=\"notable\">Notable traits for {}</div>\
                             <code class=\"content\">",
                                impl_.for_.print(cx)
                            );
                        }
                        //use the "where" class here to make it small
                        write!(
                            &mut out,
                            "<span class=\"where fmt-newline\">{}</span>",
                            impl_.print(false, cx)
                        );
                        // Also list the impl's associated type bindings.
                        for it in &impl_.items {
                            if let clean::TypedefItem(ref tydef, _) = *it.kind {
                                out.push_str("<span class=\"where fmt-newline\">    ");
                                let empty_set = FxHashSet::default();
                                let src_link =
                                    AssocItemLink::GotoSource(trait_did.into(), &empty_set);
                                assoc_type(&mut out, it, &[], Some(&tydef.type_), src_link, "", cx);
                                out.push_str(";</span>");
                            }
                        }
                    }
                }
            }
        }
    }
    if !out.is_empty() {
        // Wrap everything in the ⓘ tooltip trigger.
        out.insert_str(
            0,
            "<span class=\"notable-traits\"><span class=\"notable-traits-tooltip\">ⓘ\
            <div class=\"notable-traits-tooltiptext\"><span class=\"docblock\">",
        );
        out.push_str("</code></span></div></span></span>");
    }
    out.into_inner()
}
/// Knobs controlling how `render_impl` displays a single impl block.
#[derive(Clone, Copy, Debug)]
struct ImplRenderingParameters {
    /// Whether to render documentation for the impl's individual items.
    show_def_docs: bool,
    /// True when the impl is shown in an "Implementations on Foreign Types"
    /// section; changes the anchor id in `render_impl_summary`.
    is_on_foreign_type: bool,
    /// Whether to also render the trait's default items the impl doesn't override.
    show_default_items: bool,
    /// Whether or not to show methods.
    show_non_assoc_items: bool,
    /// Whether the `<details>` toggle wrapping the impl starts out open.
    toggle_open_by_default: bool,
}
/// Renders one impl block into `w`: the collapsible summary header, the doc
/// text attached to the impl itself, all of its items, and (optionally) the
/// trait's un-overridden default items.
fn render_impl(
    w: &mut Buffer,
    cx: &Context<'_>,
    i: &Impl,
    parent: &clean::Item,
    link: AssocItemLink<'_>,
    render_mode: RenderMode,
    use_absolute: Option<bool>,
    aliases: &[String],
    rendering_params: ImplRenderingParameters,
) {
    let cache = cx.cache();
    let traits = &cache.traits;
    let trait_ = i.trait_did().map(|did| &traits[&did]);
    // Closing tags are accumulated front-to-back so nested elements close in
    // the right order when flushed at the end.
    let mut close_tags = String::new();
    // For trait implementations, the `interesting` output contains all methods that have doc
    // comments, and the `boring` output contains all methods that do not. The distinction is
    // used to allow hiding the boring methods.
    // `containing_item` is used for rendering stability info. If the parent is a trait impl,
    // `containing_item` will be the grandparent, since trait impls can't have stability attached.
    fn doc_impl_item(
        boring: &mut Buffer,
        interesting: &mut Buffer,
        cx: &Context<'_>,
        item: &clean::Item,
        parent: &clean::Item,
        containing_item: &clean::Item,
        link: AssocItemLink<'_>,
        render_mode: RenderMode,
        is_default_item: bool,
        trait_: Option<&clean::Trait>,
        rendering_params: ImplRenderingParameters,
    ) {
        let item_type = item.type_();
        let name = item.name.as_ref().unwrap();
        let render_method_item = rendering_params.show_non_assoc_items
            && match render_mode {
                RenderMode::Normal => true,
                RenderMode::ForDeref { mut_: deref_mut_ } => {
                    should_render_item(item, deref_mut_, cx.tcx())
                }
            };
        let in_trait_class = if trait_.is_some() { " trait-impl" } else { "" };
        let mut doc_buffer = Buffer::empty_from(boring);
        let mut info_buffer = Buffer::empty_from(boring);
        let mut short_documented = true;
        if render_method_item {
            if !is_default_item {
                if let Some(t) = trait_ {
                    // The trait item may have been stripped so we might not
                    // find any documentation or stability for it.
                    if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
                        // We need the stability of the item from the trait
                        // because impls can't have a stability.
                        if item.doc_value().is_some() {
                            document_item_info(&mut info_buffer, cx, it, Some(parent));
                            document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
                            short_documented = false;
                        } else {
                            // In case the item isn't documented,
                            // provide short documentation from the trait.
                            document_short(
                                &mut doc_buffer,
                                it,
                                cx,
                                link,
                                parent,
                                rendering_params.show_def_docs,
                            );
                        }
                    }
                } else {
                    document_item_info(&mut info_buffer, cx, item, Some(parent));
                    if rendering_params.show_def_docs {
                        document_full(&mut doc_buffer, item, cx, HeadingOffset::H5);
                        short_documented = false;
                    }
                }
            } else {
                document_short(
                    &mut doc_buffer,
                    item,
                    cx,
                    link,
                    parent,
                    rendering_params.show_def_docs,
                );
            }
        }
        // NOTE(review): routing `short_documented` items into `interesting`
        // looks inverted w.r.t. the comment above `doc_impl_item` — confirm.
        let w = if short_documented && trait_.is_some() { interesting } else { boring };
        // Wrap the item in a <details> toggle only when it has documentation.
        let toggled = !doc_buffer.is_empty();
        if toggled {
            let method_toggle_class =
                if item_type == ItemType::Method { " method-toggle" } else { "" };
            write!(w, "<details class=\"rustdoc-toggle{}\" open><summary>", method_toggle_class);
        }
        match *item.kind {
            clean::MethodItem(..) | clean::TyMethodItem(_) => {
                // Only render when the method is not static or we allow static methods
                if render_method_item {
                    let id = cx.derive_id(format!("{}.{}", item_type, name));
                    let source_id = trait_
                        .and_then(|trait_| {
                            trait_.items.iter().find(|item| {
                                item.name.map(|n| n.as_str().eq(&name.as_str())).unwrap_or(false)
                            })
                        })
                        .map(|item| format!("{}.{}", item.type_(), name));
                    write!(
                        w,
                        "<div id=\"{}\" class=\"{}{} has-srclink\">",
                        id, item_type, in_trait_class,
                    );
                    render_rightside(w, cx, item, containing_item);
                    write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
                    w.write_str("<h4 class=\"code-header\">");
                    render_assoc_item(
                        w,
                        item,
                        link.anchor(source_id.as_ref().unwrap_or(&id)),
                        ItemType::Impl,
                        cx,
                    );
                    w.write_str("</h4>");
                    w.write_str("</div>");
                }
            }
            clean::TypedefItem(ref tydef, _) => {
                let source_id = format!("{}.{}", ItemType::AssocType, name);
                let id = cx.derive_id(source_id.clone());
                write!(
                    w,
                    "<div id=\"{}\" class=\"{}{} has-srclink\">",
                    id, item_type, in_trait_class
                );
                write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
                w.write_str("<h4 class=\"code-header\">");
                assoc_type(
                    w,
                    item,
                    &Vec::new(),
                    Some(&tydef.type_),
                    link.anchor(if trait_.is_some() { &source_id } else { &id }),
                    "",
                    cx,
                );
                w.write_str("</h4>");
                w.write_str("</div>");
            }
            clean::AssocConstItem(ref ty, ref default) => {
                let source_id = format!("{}.{}", item_type, name);
                let id = cx.derive_id(source_id.clone());
                write!(
                    w,
                    "<div id=\"{}\" class=\"{}{} has-srclink\">",
                    id, item_type, in_trait_class
                );
                render_rightside(w, cx, item, containing_item);
                write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
                w.write_str("<h4 class=\"code-header\">");
                assoc_const(
                    w,
                    item,
                    ty,
                    default.as_ref(),
                    link.anchor(if trait_.is_some() { &source_id } else { &id }),
                    "",
                    cx,
                );
                w.write_str("</h4>");
                w.write_str("</div>");
            }
            clean::AssocTypeItem(ref bounds, ref default) => {
                let source_id = format!("{}.{}", item_type, name);
                let id = cx.derive_id(source_id.clone());
                write!(w, "<div id=\"{}\" class=\"{}{}\">", id, item_type, in_trait_class,);
                write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
                w.write_str("<h4 class=\"code-header\">");
                assoc_type(
                    w,
                    item,
                    bounds,
                    default.as_ref(),
                    link.anchor(if trait_.is_some() { &source_id } else { &id }),
                    "",
                    cx,
                );
                w.write_str("</h4>");
                w.write_str("</div>");
            }
            clean::StrippedItem(..) => return,
            _ => panic!("can't make docs for trait item with name {:?}", item.name),
        }
        w.push_buffer(info_buffer);
        if toggled {
            w.write_str("</summary>");
            w.push_buffer(doc_buffer);
            w.push_str("</details>");
        }
    }
    let mut impl_items = Buffer::empty_from(w);
    let mut default_impl_items = Buffer::empty_from(w);
    for trait_item in &i.inner_impl().items {
        doc_impl_item(
            &mut default_impl_items,
            &mut impl_items,
            cx,
            trait_item,
            if trait_.is_some() { &i.impl_item } else { parent },
            parent,
            link,
            render_mode,
            false,
            trait_.map(|t| &t.trait_),
            rendering_params,
        );
    }
    // Renders the trait items that `i` does not override (looked up by name).
    fn render_default_items(
        boring: &mut Buffer,
        interesting: &mut Buffer,
        cx: &Context<'_>,
        t: &clean::Trait,
        i: &clean::Impl,
        parent: &clean::Item,
        containing_item: &clean::Item,
        render_mode: RenderMode,
        rendering_params: ImplRenderingParameters,
    ) {
        for trait_item in &t.items {
            let n = trait_item.name;
            if i.items.iter().any(|m| m.name == n) {
                continue;
            }
            let did = i.trait_.as_ref().unwrap().def_id();
            let provided_methods = i.provided_trait_methods(cx.tcx());
            let assoc_link = AssocItemLink::GotoSource(did.into(), &provided_methods);
            doc_impl_item(
                boring,
                interesting,
                cx,
                trait_item,
                parent,
                containing_item,
                assoc_link,
                render_mode,
                true,
                Some(t),
                rendering_params,
            );
        }
    }
    // If we've implemented a trait, then also emit documentation for all
    // default items which weren't overridden in the implementation block.
    // We don't emit documentation for default items if they appear in the
    // Implementations on Foreign Types or Implementors sections.
    if rendering_params.show_default_items {
        if let Some(t) = trait_ {
            render_default_items(
                &mut default_impl_items,
                &mut impl_items,
                cx,
                &t.trait_,
                i.inner_impl(),
                &i.impl_item,
                parent,
                render_mode,
                rendering_params,
            );
        }
    }
    if render_mode == RenderMode::Normal {
        let toggled = !(impl_items.is_empty() && default_impl_items.is_empty());
        if toggled {
            close_tags.insert_str(0, "</details>");
            write!(
                w,
                "<details class=\"rustdoc-toggle implementors-toggle\"{}>",
                if rendering_params.toggle_open_by_default { " open" } else { "" }
            );
            write!(w, "<summary>")
        }
        render_impl_summary(
            w,
            cx,
            i,
            parent,
            parent,
            rendering_params.show_def_docs,
            use_absolute,
            rendering_params.is_on_foreign_type,
            aliases,
        );
        if toggled {
            write!(w, "</summary>")
        }
        // Documentation written on the impl block itself, rendered as Markdown.
        if let Some(ref dox) = cx.shared.maybe_collapsed_doc_value(&i.impl_item) {
            let mut ids = cx.id_map.borrow_mut();
            write!(
                w,
                "<div class=\"docblock\">{}</div>",
                Markdown {
                    content: &*dox,
                    links: &i.impl_item.links(cx),
                    ids: &mut ids,
                    error_codes: cx.shared.codes,
                    edition: cx.shared.edition(),
                    playground: &cx.shared.playground,
                    heading_offset: HeadingOffset::H4
                }
                .into_string()
            );
        }
    }
    if !default_impl_items.is_empty() || !impl_items.is_empty() {
        w.write_str("<div class=\"impl-items\">");
        w.push_buffer(default_impl_items);
        w.push_buffer(impl_items);
        close_tags.insert_str(0, "</div>");
    }
    w.write_str(&close_tags);
}
/// Render the items that appear on the right side of methods, impls, and
/// associated types. For example "1.0.0 (const: 1.39.0) [src]".
fn render_rightside(
    w: &mut Buffer,
    cx: &Context<'_>,
    item: &clean::Item,
    containing_item: &clean::Item,
) {
    let tcx = cx.tcx();
    write!(w, "<div class=\"rightside\">");
    // The stability badge is suppressed when it would repeat the containing
    // item's version (see `render_stability_since_raw`).
    render_stability_since_raw(
        w,
        item.stable_since(tcx).as_deref(),
        item.const_stability(tcx),
        containing_item.stable_since(tcx).as_deref(),
        containing_item.const_stable_since(tcx).as_deref(),
    );
    write_srclink(cx, item, w);
    w.write_str("</div>");
}
/// Renders the one-line summary header of an impl block ("impl Trait for
/// Type"), including the rightside stability/src links, optional associated
/// type bindings, and — for trait impls — a portability note.
pub(crate) fn render_impl_summary(
    w: &mut Buffer,
    cx: &Context<'_>,
    i: &Impl,
    parent: &clean::Item,
    containing_item: &clean::Item,
    show_def_docs: bool,
    use_absolute: Option<bool>,
    is_on_foreign_type: bool,
    // This argument is used to reference same type with different paths to avoid duplication
    // in documentation pages for trait with automatic implementations like "Send" and "Sync".
    aliases: &[String],
) {
    // Anchor id: "impl" for inherent impls, otherwise derived from the trait
    // (with a foreign-type variant when rendered on the trait's own page).
    let id = cx.derive_id(match i.inner_impl().trait_ {
        Some(ref t) => {
            if is_on_foreign_type {
                get_id_for_impl_on_foreign_type(&i.inner_impl().for_, t, cx)
            } else {
                format!("impl-{}", small_url_encode(format!("{:#}", t.print(cx))))
            }
        }
        None => "impl".to_string(),
    });
    let aliases = if aliases.is_empty() {
        String::new()
    } else {
        format!(" data-aliases=\"{}\"", aliases.join(","))
    };
    write!(w, "<div id=\"{}\" class=\"impl has-srclink\"{}>", id, aliases);
    render_rightside(w, cx, &i.impl_item, containing_item);
    write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
    write!(w, "<h3 class=\"code-header in-band\">");
    if let Some(use_absolute) = use_absolute {
        write!(w, "{}", i.inner_impl().print(use_absolute, cx));
        // Also show the impl's associated type bindings inline.
        if show_def_docs {
            for it in &i.inner_impl().items {
                if let clean::TypedefItem(ref tydef, _) = *it.kind {
                    w.write_str("<span class=\"where fmt-newline\">  ");
                    assoc_type(w, it, &[], Some(&tydef.type_), AssocItemLink::Anchor(None), "", cx);
                    w.write_str(";</span>");
                }
            }
        }
    } else {
        write!(w, "{}", i.inner_impl().print(false, cx));
    }
    write!(w, "</h3>");
    let is_trait = i.inner_impl().trait_.is_some();
    if is_trait {
        if let Some(portability) = portability(&i.impl_item, Some(parent)) {
            write!(w, "<div class=\"item-info\">{}</div>", portability);
        }
    }
    w.write_str("</div>");
}
/// Emits the page sidebar for `it`: the kind/name heading, the crate version
/// (on crate pages), kind-specific link sections, sibling-item navigation, and
/// the `sidebar-vars` / `sidebar-items.js` hooks consumed by the frontend JS.
fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) {
    // Number of path components of the enclosing module (the sidebar of a
    // module page refers to its parent, hence the `- 1`).
    let parentlen = cx.current.len() - if it.is_mod() { 1 } else { 0 };
    if it.is_struct()
        || it.is_trait()
        || it.is_primitive()
        || it.is_union()
        || it.is_enum()
        || it.is_mod()
        || it.is_typedef()
    {
        write!(
            buffer,
            "<h2 class=\"location\">{}{}</h2>",
            match *it.kind {
                clean::StructItem(..) => "Struct ",
                clean::TraitItem(..) => "Trait ",
                clean::PrimitiveItem(..) => "Primitive Type ",
                clean::UnionItem(..) => "Union ",
                clean::EnumItem(..) => "Enum ",
                clean::TypedefItem(..) => "Type Definition ",
                clean::ForeignTypeItem => "Foreign Type ",
                clean::ModuleItem(..) =>
                    if it.is_crate() {
                        "Crate "
                    } else {
                        "Module "
                    },
                _ => "",
            },
            it.name.as_ref().unwrap()
        );
    }
    if it.is_crate() {
        if let Some(ref version) = cx.cache().crate_version {
            write!(
                buffer,
                "<div class=\"block version\">\
                     <div class=\"narrow-helper\"></div>\
                     <p>Version {}</p>\
                 </div>",
                Escape(version),
            );
        }
    }
    buffer.write_str("<div class=\"sidebar-elems\">");
    if it.is_crate() {
        write!(
            buffer,
            "<a id=\"all-types\" href=\"all.html\"><p>See all {}'s items</p></a>",
            it.name.as_ref().expect("crates always have a name"),
        );
    }
    // Kind-specific sidebar sections.
    match *it.kind {
        clean::StructItem(ref s) => sidebar_struct(cx, buffer, it, s),
        clean::TraitItem(ref t) => sidebar_trait(cx, buffer, it, t),
        clean::PrimitiveItem(_) => sidebar_primitive(cx, buffer, it),
        clean::UnionItem(ref u) => sidebar_union(cx, buffer, it, u),
        clean::EnumItem(ref e) => sidebar_enum(cx, buffer, it, e),
        clean::TypedefItem(_, _) => sidebar_typedef(cx, buffer, it),
        clean::ModuleItem(ref m) => sidebar_module(buffer, &m.items),
        clean::ForeignTypeItem => sidebar_foreign_type(cx, buffer, it),
        _ => {}
    }
    // The sidebar is designed to display sibling functions, modules and
    // other miscellaneous information. since there are lots of sibling
    // items (and that causes quadratic growth in large modules),
    // we refactor common parts into a shared JavaScript file per module.
    // still, we don't move everything into JS because we want to preserve
    // as much HTML as possible in order to allow non-JS-enabled browsers
    // to navigate the documentation (though slightly inefficiently).
    if !it.is_mod() {
        buffer.write_str("<h2 class=\"location\">Other items in<br>");
        for (i, name) in cx.current.iter().take(parentlen).enumerate() {
            if i > 0 {
                buffer.write_str("::<wbr>");
            }
            write!(
                buffer,
                "<a href=\"{}index.html\">{}</a>",
                &cx.root_path()[..(cx.current.len() - i - 1) * 3],
                *name
            );
        }
        buffer.write_str("</h2>");
    }
    // Sidebar refers to the enclosing module, not this module.
    let relpath = if it.is_mod() && parentlen != 0 { "./" } else { "" };
    write!(
        buffer,
        "<div id=\"sidebar-vars\" data-name=\"{name}\" data-ty=\"{ty}\" data-relpath=\"{path}\">\
        </div>",
        name = it.name.unwrap_or(kw::Empty),
        ty = it.type_(),
        path = relpath
    );
    write!(buffer, "<script defer src=\"{}sidebar-items.js\"></script>", relpath);
    // Closes sidebar-elems div.
    buffer.write_str("</div>");
}
/// Returns `url` if it has not been handed out on this page yet; otherwise
/// appends `-1`, `-2`, … until a free variant is found. The returned link is
/// recorded in `used_links` either way.
fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
    if used_links.insert(url.clone()) {
        return url;
    }
    let mut suffix = 1;
    loop {
        let candidate = format!("{}-{}", url, suffix);
        if used_links.insert(candidate.clone()) {
            return candidate;
        }
        suffix += 1;
    }
}
/// A single entry in the sidebar's "Methods" / "Associated Constants" lists.
struct SidebarLink {
    // Display text (the item's name).
    name: Symbol,
    // In-page anchor target, without the leading `#`.
    url: String,
}
impl fmt::Display for SidebarLink {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "<a href=\"#{}\">{}</a>", self.url, self.name)
    }
}
// Equality and ordering use `url` alone: urls are unique per page (see
// `get_next_url`), and sorting by url keeps the link order reproducible.
impl PartialEq for SidebarLink {
    fn eq(&self, other: &Self) -> bool {
        self.url == other.url
    }
}
impl Eq for SidebarLink {}
impl PartialOrd for SidebarLink {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for SidebarLink {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.url.cmp(&other.url)
    }
}
/// Collects sidebar links for the named methods of one impl. When `for_deref`
/// is set, methods are additionally filtered through `should_render_item`
/// (mirroring what the deref-methods section will actually display).
fn get_methods(
    i: &clean::Impl,
    for_deref: bool,
    used_links: &mut FxHashSet<String>,
    deref_mut: bool,
    tcx: TyCtxt<'_>,
) -> Vec<SidebarLink> {
    let mut links = Vec::new();
    for item in &i.items {
        let name = match item.name {
            Some(name) if !name.is_empty() && item.is_method() => name,
            _ => continue,
        };
        if for_deref && !should_render_item(item, deref_mut, tcx) {
            continue;
        }
        let url = get_next_url(used_links, format!("method.{}", name));
        links.push(SidebarLink { name, url });
    }
    links
}
/// Collects sidebar links for the named associated constants of one impl,
/// de-duplicating anchors through `used_links`.
fn get_associated_constants(
    i: &clean::Impl,
    used_links: &mut FxHashSet<String>,
) -> Vec<SidebarLink> {
    let mut links = Vec::new();
    for item in &i.items {
        let name = match item.name {
            Some(name) if !name.is_empty() && item.is_associated_const() => name,
            _ => continue,
        };
        let url = get_next_url(used_links, format!("associatedconstant.{}", name));
        links.push(SidebarLink { name, url });
    }
    links
}
// The point is to url encode any potential character from a type with genericity.
fn small_url_encode(s: String) -> String {
    /// Returns the percent-encoding for the characters we care about, or
    /// `None` for anything that may stay as-is.
    fn escaped(c: char) -> Option<&'static str> {
        Some(match c {
            '<' => "%3C",
            '>' => "%3E",
            ' ' => "%20",
            '?' => "%3F",
            '\'' => "%27",
            '&' => "%26",
            ',' => "%2C",
            ':' => "%3A",
            ';' => "%3B",
            '[' => "%5B",
            ']' => "%5D",
            '"' => "%22",
            _ => return None,
        })
    }
    let mut encoded = String::new();
    // Byte index just past the last character already copied into `encoded`.
    let mut copied_up_to = 0;
    for (idx, c) in s.char_indices() {
        if let Some(repl) = escaped(c) {
            encoded += &s[copied_up_to..idx];
            encoded += repl;
            // Every escaped character is single-byte ASCII, so `idx + 1` is
            // the start of the following character.
            copied_up_to = idx + 1;
        }
    }
    if copied_up_to == 0 {
        // Nothing needed escaping: hand back the original allocation untouched.
        s
    } else {
        encoded += &s[copied_up_to..];
        encoded
    }
}
/// Renders the sidebar sections for an item's associated items: associated
/// constants, inherent methods, and trait implementations (concrete,
/// auto/synthetic, and blanket), including methods reachable through `Deref`.
fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
    let did = it.def_id.expect_def_id();
    let cache = cx.cache();
    if let Some(v) = cache.impls.get(&did) {
        // Anchors already emitted on this page; duplicates get "-N" suffixes.
        let mut used_links = FxHashSet::default();
        {
            let used_links_bor = &mut used_links;
            let mut assoc_consts = v
                .iter()
                .flat_map(|i| get_associated_constants(i.inner_impl(), used_links_bor))
                .collect::<Vec<_>>();
            if !assoc_consts.is_empty() {
                // We want links' order to be reproducible so we don't use unstable sort.
                assoc_consts.sort();
                out.push_str(
                    "<h3 class=\"sidebar-title\">\
                        <a href=\"#implementations\">Associated Constants</a>\
                     </h3>\
                     <div class=\"sidebar-links\">",
                );
                for line in assoc_consts {
                    write!(out, "{}", line);
                }
                out.push_str("</div>");
            }
            // Only inherent impls (no trait) feed the "Methods" section.
            let mut methods = v
                .iter()
                .filter(|i| i.inner_impl().trait_.is_none())
                .flat_map(|i| get_methods(i.inner_impl(), false, used_links_bor, false, cx.tcx()))
                .collect::<Vec<_>>();
            if !methods.is_empty() {
                // We want links' order to be reproducible so we don't use unstable sort.
                methods.sort();
                out.push_str(
                    "<h3 class=\"sidebar-title\"><a href=\"#implementations\">Methods</a></h3>\
                     <div class=\"sidebar-links\">",
                );
                for line in methods {
                    write!(out, "{}", line);
                }
                out.push_str("</div>");
            }
        }
        if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
            // If the type implements `Deref`, also list the target's methods.
            if let Some(impl_) =
                v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
            {
                let mut derefs = FxHashSet::default();
                derefs.insert(did);
                sidebar_deref_methods(cx, out, impl_, v, &mut derefs);
            }
            // Turns a list of trait impls into sorted, de-duplicated `<a>` links
            // pointing at their `#impl-…` sections.
            let format_impls = |impls: Vec<&Impl>| {
                let mut links = FxHashSet::default();
                let mut ret = impls
                    .iter()
                    .filter_map(|it| {
                        if let Some(ref i) = it.inner_impl().trait_ {
                            let i_display = format!("{:#}", i.print(cx));
                            let out = Escape(&i_display);
                            let encoded = small_url_encode(format!("{:#}", i.print(cx)));
                            // Negative impls are prefixed with "!".
                            let prefix = match it.inner_impl().polarity {
                                ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
                                ty::ImplPolarity::Negative => "!",
                            };
                            let generated =
                                format!("<a href=\"#impl-{}\">{}{}</a>", encoded, prefix, out);
                            if links.insert(generated.clone()) { Some(generated) } else { None }
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<String>>();
                ret.sort();
                ret
            };
            let write_sidebar_links = |out: &mut Buffer, links: Vec<String>| {
                out.push_str("<div class=\"sidebar-links\">");
                for link in links {
                    out.push_str(&link);
                }
                out.push_str("</div>");
            };
            // Split the impls into auto-trait ("synthetic"), blanket, and concrete.
            let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) =
                v.iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_auto());
            let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) =
                concrete.into_iter().partition::<Vec<_>, _>(|i| i.inner_impl().kind.is_blanket());
            let concrete_format = format_impls(concrete);
            let synthetic_format = format_impls(synthetic);
            let blanket_format = format_impls(blanket_impl);
            if !concrete_format.is_empty() {
                out.push_str(
                    "<h3 class=\"sidebar-title\"><a href=\"#trait-implementations\">\
                        Trait Implementations</a></h3>",
                );
                write_sidebar_links(out, concrete_format);
            }
            if !synthetic_format.is_empty() {
                out.push_str(
                    "<h3 class=\"sidebar-title\"><a href=\"#synthetic-implementations\">\
                        Auto Trait Implementations</a></h3>",
                );
                write_sidebar_links(out, synthetic_format);
            }
            if !blanket_format.is_empty() {
                out.push_str(
                    "<h3 class=\"sidebar-title\"><a href=\"#blanket-implementations\">\
                        Blanket Implementations</a></h3>",
                );
                write_sidebar_links(out, blanket_format);
            }
        }
    }
}
/// Renders the sidebar section for methods reachable through a `Deref` impl,
/// recursing through chained `Deref` targets. `derefs` records the target
/// `DefId`s already visited so that `Deref` cycles terminate.
fn sidebar_deref_methods(
    cx: &Context<'_>,
    out: &mut Buffer,
    impl_: &Impl,
    v: &[Impl],
    derefs: &mut FxHashSet<DefId>,
) {
    let c = cx.cache();
    debug!("found Deref: {:?}", impl_);
    // Pull the `Target = …` associated type out of the `Deref` impl; prefer
    // the resolved `item_type` when present, falling back to the written type.
    if let Some((target, real_target)) =
        impl_.inner_impl().items.iter().find_map(|item| match *item.kind {
            clean::TypedefItem(ref t, true) => Some(match *t {
                clean::Typedef { item_type: Some(ref type_), .. } => (type_, &t.type_),
                _ => (&t.type_, &t.type_),
            }),
            _ => None,
        })
    {
        debug!("found target, real_target: {:?} {:?}", target, real_target);
        if let Some(did) = target.def_id(c) {
            if let Some(type_did) = impl_.inner_impl().for_.def_id(c) {
                // `impl Deref<Target = S> for S`
                if did == type_did || !derefs.insert(did) {
                    // Avoid infinite cycles
                    return;
                }
            }
        }
        let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
        // Locate the target type's impls: by DefId, or by primitive location
        // when the target is a primitive type.
        let inner_impl = target
            .def_id(c)
            .or_else(|| {
                target.primitive_type().and_then(|prim| c.primitive_locations.get(&prim).cloned())
            })
            .and_then(|did| c.impls.get(&did));
        if let Some(impls) = inner_impl {
            debug!("found inner_impl: {:?}", impls);
            let mut used_links = FxHashSet::default();
            // Inherent methods of the Deref target, filtered by renderability.
            let mut ret = impls
                .iter()
                .filter(|i| i.inner_impl().trait_.is_none())
                .flat_map(|i| {
                    get_methods(i.inner_impl(), true, &mut used_links, deref_mut, cx.tcx())
                })
                .collect::<Vec<_>>();
            if !ret.is_empty() {
                let map;
                // Use the pre-derived section id when the target has one.
                let id = if let Some(target_def_id) = real_target.def_id(c) {
                    map = cx.deref_id_map.borrow();
                    map.get(&target_def_id).expect("Deref section without derived id")
                } else {
                    "deref-methods"
                };
                write!(
                    out,
                    "<h3 class=\"sidebar-title\"><a href=\"#{}\">Methods from {}<Target={}></a></h3>",
                    id,
                    Escape(&format!("{:#}", impl_.inner_impl().trait_.as_ref().unwrap().print(cx))),
                    Escape(&format!("{:#}", real_target.print(cx))),
                );
                // We want links' order to be reproducible so we don't use unstable sort.
                ret.sort();
                out.push_str("<div class=\"sidebar-links\">");
                for link in ret {
                    write!(out, "{}", link);
                }
                out.push_str("</div>");
            }
        }
        // Recurse into any further impls that might exist for `target`
        if let Some(target_did) = target.def_id(c) {
            if let Some(target_impls) = c.impls.get(&target_did) {
                if let Some(target_deref_impl) = target_impls.iter().find(|i| {
                    i.inner_impl()
                        .trait_
                        .as_ref()
                        .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
                        .unwrap_or(false)
                }) {
                    sidebar_deref_methods(cx, out, target_deref_impl, target_impls, derefs);
                }
            }
        }
    }
}
/// Renders the sidebar for a struct page: field links for braced structs, a
/// plain heading for tuple structs, followed by the shared associated-items
/// sections.
fn sidebar_struct(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, s: &clean::Struct) {
    let mut sidebar = Buffer::new();
    let field_links = get_struct_fields_name(&s.fields);
    if !field_links.is_empty() {
        match s.struct_type {
            // Braced struct: one link per named field.
            CtorKind::Fictive => {
                sidebar.push_str(
                    "<h3 class=\"sidebar-title\"><a href=\"#fields\">Fields</a></h3>\
                        <div class=\"sidebar-links\">",
                );
                for link in &field_links {
                    sidebar.push_str(link);
                }
                sidebar.push_str("</div>");
            }
            // Tuple struct: the fields carry no names, so only a heading.
            CtorKind::Fn => sidebar
                .push_str("<h3 class=\"sidebar-title\"><a href=\"#fields\">Tuple Fields</a></h3>"),
            // Unit structs get no fields section at all.
            _ => {}
        }
    }
    sidebar_assoc_items(cx, &mut sidebar, it);
    if !sidebar.is_empty() {
        write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
    }
}
/// Derives the URL-encoded section id for an impl of `trait_` on the foreign
/// type `for_` (shape: `impl-Trait-for-Type`, percent-encoded).
fn get_id_for_impl_on_foreign_type(
    for_: &clean::Type,
    trait_: &clean::Path,
    cx: &Context<'_>,
) -> String {
    small_url_encode(format!("impl-{:#}-for-{:#}", trait_.print(cx), for_.print(cx)))
}
/// For a trait-impl item, returns the rendered "for" type and the section id
/// it should link to; `None` for anything else (including inherent impls).
fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> {
    let i = match *item.kind {
        clean::ItemKind::ImplItem(ref i) => i,
        _ => return None,
    };
    let trait_ = i.trait_.as_ref()?;
    // Alternative format produces no URLs,
    // so this parameter does nothing.
    Some((
        format!("{:#}", i.for_.print(cx)),
        get_id_for_impl_on_foreign_type(&i.for_, trait_, cx),
    ))
}
/// Renders the sidebar for a trait page: associated types and constants,
/// required and provided methods, impls on foreign types, and the
/// implementors heading(s).
fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean::Trait) {
    buf.write_str("<div class=\"block items\">");
    // Emits one heading + link list for the trait items accepted by `filter`.
    // Nothing at all is written when no item matches.
    fn print_sidebar_section(
        out: &mut Buffer,
        items: &[clean::Item],
        before: &str,
        filter: impl Fn(&clean::Item) -> bool,
        write: impl Fn(&mut Buffer, &str),
        after: &str,
    ) {
        let mut items = items
            .iter()
            .filter_map(|m| match m.name {
                Some(ref name) if filter(m) => Some(name.as_str()),
                _ => None,
            })
            .collect::<Vec<_>>();
        if !items.is_empty() {
            items.sort_unstable();
            out.push_str(before);
            for item in items.into_iter() {
                write(out, &item);
            }
            out.push_str(after);
        }
    }
    print_sidebar_section(
        buf,
        &t.items,
        "<h3 class=\"sidebar-title\"><a href=\"#associated-types\">\
            Associated Types</a></h3><div class=\"sidebar-links\">",
        |m| m.is_associated_type(),
        |out, sym| write!(out, "<a href=\"#associatedtype.{0}\">{0}</a>", sym),
        "</div>",
    );
    print_sidebar_section(
        buf,
        &t.items,
        "<h3 class=\"sidebar-title\"><a href=\"#associated-const\">\
            Associated Constants</a></h3><div class=\"sidebar-links\">",
        |m| m.is_associated_const(),
        |out, sym| write!(out, "<a href=\"#associatedconstant.{0}\">{0}</a>", sym),
        "</div>",
    );
    print_sidebar_section(
        buf,
        &t.items,
        "<h3 class=\"sidebar-title\"><a href=\"#required-methods\">\
            Required Methods</a></h3><div class=\"sidebar-links\">",
        |m| m.is_ty_method(),
        |out, sym| write!(out, "<a href=\"#tymethod.{0}\">{0}</a>", sym),
        "</div>",
    );
    print_sidebar_section(
        buf,
        &t.items,
        "<h3 class=\"sidebar-title\"><a href=\"#provided-methods\">\
            Provided Methods</a></h3><div class=\"sidebar-links\">",
        |m| m.is_method(),
        |out, sym| write!(out, "<a href=\"#method.{0}\">{0}</a>", sym),
        "</div>",
    );
    let cache = cx.cache();
    // Impls of this trait on types that have no documentation page here
    // (foreign types, i.e. not present in `cache.paths`).
    if let Some(implementors) = cache.implementors.get(&it.def_id.expect_def_id()) {
        let mut res = implementors
            .iter()
            .filter(|i| {
                i.inner_impl().for_.def_id(cache).map_or(false, |d| !cache.paths.contains_key(&d))
            })
            .filter_map(|i| extract_for_impl_name(&i.impl_item, cx))
            .collect::<Vec<_>>();
        if !res.is_empty() {
            res.sort();
            buf.push_str(
                "<h3 class=\"sidebar-title\"><a href=\"#foreign-impls\">\
                    Implementations on Foreign Types</a></h3>\
                 <div class=\"sidebar-links\">",
            );
            for (name, id) in res.into_iter() {
                write!(buf, "<a href=\"#{}\">{}</a>", id, Escape(&name));
            }
            buf.push_str("</div>");
        }
    }
    sidebar_assoc_items(cx, buf, it);
    buf.push_str("<h3 class=\"sidebar-title\"><a href=\"#implementors\">Implementors</a></h3>");
    // Auto traits additionally get a section for automatically derived impls.
    if t.is_auto {
        buf.push_str(
            "<h3 class=\"sidebar-title\"><a \
                href=\"#synthetic-implementors\">Auto Implementors</a></h3>",
        );
    }
    buf.push_str("</div>")
}
/// Renders the sidebar for a primitive type page (associated items only).
fn sidebar_primitive(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
    // Collect into a scratch buffer first so the wrapper <div> can be
    // skipped entirely when there is nothing to list.
    let mut assoc = Buffer::new();
    sidebar_assoc_items(cx, &mut assoc, it);
    if assoc.is_empty() {
        return;
    }
    write!(buf, "<div class=\"block items\">{}</div>", assoc.into_inner());
}
/// Renders the sidebar for a type-definition page (associated items only).
fn sidebar_typedef(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
    // Collect into a scratch buffer first so the wrapper <div> can be
    // skipped entirely when there is nothing to list.
    let mut assoc = Buffer::new();
    sidebar_assoc_items(cx, &mut assoc, it);
    if assoc.is_empty() {
        return;
    }
    write!(buf, "<div class=\"block items\">{}</div>", assoc.into_inner());
}
/// Builds sorted `<a href="#structfield.…">` links for a struct's named fields.
fn get_struct_fields_name(fields: &[clean::Item]) -> Vec<String> {
    let mut links: Vec<String> = fields
        .iter()
        // Only actual struct fields contribute; other item kinds are ignored.
        .filter(|f| matches!(*f.kind, clean::StructFieldItem(..)))
        // Unnamed fields cannot be linked to.
        .filter_map(|f| f.name)
        .map(|name| format!("<a href=\"#structfield.{name}\">{name}</a>", name = name))
        .collect();
    links.sort();
    links
}
/// Renders the sidebar for a union page: field links plus the shared
/// associated-items sections.
fn sidebar_union(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, u: &clean::Union) {
    let mut sidebar = Buffer::new();
    let field_links = get_struct_fields_name(&u.fields);
    if !field_links.is_empty() {
        sidebar.push_str(
            "<h3 class=\"sidebar-title\"><a href=\"#fields\">Fields</a></h3>\
                <div class=\"sidebar-links\">",
        );
        for link in &field_links {
            sidebar.push_str(link);
        }
        sidebar.push_str("</div>");
    }
    sidebar_assoc_items(cx, &mut sidebar, it);
    if !sidebar.is_empty() {
        write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
    }
}
/// Renders the sidebar for an enum page: variant links plus the shared
/// associated-items sections.
fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean::Enum) {
    let mut sidebar = Buffer::new();
    // One link per named variant.
    let mut variant_links = Vec::new();
    for v in e.variants.iter() {
        if let Some(name) = v.name.as_ref() {
            variant_links.push(format!("<a href=\"#variant.{name}\">{name}</a>", name = name));
        }
    }
    if !variant_links.is_empty() {
        variant_links.sort_unstable();
        sidebar.push_str(&format!(
            "<h3 class=\"sidebar-title\"><a href=\"#variants\">Variants</a></h3>\
                <div class=\"sidebar-links\">{}</div>",
            variant_links.join(""),
        ));
    }
    sidebar_assoc_items(cx, &mut sidebar, it);
    if !sidebar.is_empty() {
        write!(buf, "<div class=\"block items\">{}</div>", sidebar.into_inner());
    }
}
/// Maps an `ItemType` to the `(html-section-id, human-readable-heading)`
/// pair used for the matching section of a module page and its sidebar.
fn item_ty_to_strs(ty: ItemType) -> (&'static str, &'static str) {
    match ty {
        ItemType::ExternCrate | ItemType::Import => ("reexports", "Re-exports"),
        ItemType::Module => ("modules", "Modules"),
        ItemType::Struct => ("structs", "Structs"),
        ItemType::Union => ("unions", "Unions"),
        ItemType::Enum => ("enums", "Enums"),
        ItemType::Function => ("functions", "Functions"),
        ItemType::Typedef => ("types", "Type Definitions"),
        ItemType::Static => ("statics", "Statics"),
        ItemType::Constant => ("constants", "Constants"),
        ItemType::Trait => ("traits", "Traits"),
        ItemType::Impl => ("impls", "Implementations"),
        ItemType::TyMethod => ("tymethods", "Type Methods"),
        ItemType::Method => ("methods", "Methods"),
        ItemType::StructField => ("fields", "Struct Fields"),
        ItemType::Variant => ("variants", "Variants"),
        ItemType::Macro => ("macros", "Macros"),
        ItemType::Primitive => ("primitives", "Primitive Types"),
        ItemType::AssocType => ("associated-types", "Associated Types"),
        ItemType::AssocConst => ("associated-consts", "Associated Constants"),
        ItemType::ForeignType => ("foreign-types", "Foreign Types"),
        ItemType::Keyword => ("keywords", "Keywords"),
        ItemType::OpaqueTy => ("opaque-types", "Opaque Types"),
        ItemType::ProcAttribute => ("attributes", "Attribute Macros"),
        ItemType::ProcDerive => ("derives", "Derive Macros"),
        ItemType::TraitAlias => ("trait-aliases", "Trait aliases"),
        // Generics never appear as items of a module page.
        ItemType::Generic => unreachable!(),
    }
}
/// Renders the sidebar for a module page: one `<li>` link per item-kind
/// section actually present among `items`.
fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
    let mut sidebar = String::new();
    // Re-exports are handled a bit differently because they can be extern crates or imports.
    if items.iter().any(|it| {
        it.name.is_some()
            && (it.type_() == ItemType::ExternCrate
                || (it.type_() == ItemType::Import && !it.is_stripped()))
    }) {
        let (id, name) = item_ty_to_strs(ItemType::Import);
        sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", id, name));
    }
    // ordering taken from item_module, reorder, where it prioritized elements in a certain order
    // to print its headings
    for &myty in &[
        ItemType::Primitive,
        ItemType::Module,
        ItemType::Macro,
        ItemType::Struct,
        ItemType::Enum,
        ItemType::Constant,
        ItemType::Static,
        ItemType::Trait,
        ItemType::Function,
        ItemType::Typedef,
        ItemType::Union,
        ItemType::Impl,
        ItemType::TyMethod,
        ItemType::Method,
        ItemType::StructField,
        ItemType::Variant,
        ItemType::AssocType,
        ItemType::AssocConst,
        ItemType::ForeignType,
        ItemType::Keyword,
    ] {
        // Only link to a section when it will actually be rendered.
        if items.iter().any(|it| !it.is_stripped() && it.type_() == myty && it.name.is_some()) {
            let (id, name) = item_ty_to_strs(myty);
            sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", id, name));
        }
    }
    if !sidebar.is_empty() {
        write!(buf, "<div class=\"block items\"><ul>{}</ul></div>", sidebar);
    }
}
/// Renders the sidebar for a foreign type page (associated items only).
fn sidebar_foreign_type(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item) {
    // Collect into a scratch buffer first so the wrapper <div> can be
    // skipped entirely when there is nothing to list.
    let mut assoc = Buffer::new();
    sidebar_assoc_items(cx, &mut assoc, it);
    if assoc.is_empty() {
        return;
    }
    write!(buf, "<div class=\"block items\">{}</div>", assoc.into_inner());
}
/// Keyword list shared by every generated page.
crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
/// Returns a list of all paths used in the type.
/// This is used to help deduplicate imported impls
/// for reexported types. If any of the contained
/// types are re-exported, we don't use the corresponding
/// entry from the js file, as inlining will have already
/// picked up the impl
fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
    let mut out = Vec::new();
    let mut visited = FxHashSet::default();
    // Breadth-first worklist over the type's structure.
    let mut work = VecDeque::new();
    // Resolves a DefId to its fully-qualified path, preferring local exact
    // paths over externally recorded ones, and records it in `out`.
    let mut process_path = |did: DefId| {
        let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone());
        let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern);
        if let Some(path) = fqp {
            out.push(path.join("::"));
        }
    };
    work.push_back(first_ty);
    while let Some(ty) = work.pop_front() {
        // Skip anything we've already expanded, in case types repeat.
        if !visited.insert(ty.clone()) {
            continue;
        }
        match ty {
            clean::Type::Path { path } => process_path(path.def_id()),
            clean::Type::Tuple(tys) => {
                work.extend(tys.into_iter());
            }
            clean::Type::Slice(ty) => {
                work.push_back(*ty);
            }
            clean::Type::Array(ty, _) => {
                work.push_back(*ty);
            }
            clean::Type::RawPointer(_, ty) => {
                work.push_back(*ty);
            }
            clean::Type::BorrowedRef { type_, .. } => {
                work.push_back(*type_);
            }
            clean::Type::QPath { self_type, trait_, .. } => {
                work.push_back(*self_type);
                process_path(trait_.def_id());
            }
            // NOTE(review): remaining kinds (generics, fn pointers, impl
            // Trait, …) are skipped entirely — presumably they carry no path
            // relevant here; confirm nested types inside them are not needed.
            _ => {}
        }
    }
    out
}
// Cap on the number of fully rendered example snippets per item; examples
// beyond this are emitted as plain links only.
const MAX_FULL_EXAMPLES: usize = 5;
// Examples spanning more than this many lines get an expand/collapse toggle.
const NUM_VISIBLE_LINES: usize = 10;
/// Generates the HTML for example call locations generated via the --scrape-examples flag.
fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
let tcx = cx.tcx();
let def_id = item.def_id.expect_def_id();
let key = tcx.def_path_hash(def_id);
let call_locations = match cx.shared.call_locations.get(&key) {
Some(call_locations) => call_locations,
_ => {
return;
}
};
// Generate a unique ID so users can link to this section for a given method
let id = cx.id_map.borrow_mut().derive("scraped-examples");
write!(
w,
"<div class=\"docblock scraped-example-list\">\
<span></span>\
<h5 id=\"{id}\" class=\"section-header\">\
<a href=\"#{id}\">Examples found in repository</a>\
</h5>",
id = id
);
// Create a URL to a particular location in a reverse-dependency's source file
let link_to_loc = |call_data: &CallData, loc: &CallLocation| -> (String, String) {
let (line_lo, line_hi) = loc.call_expr.line_span;
let (anchor, title) = if line_lo == line_hi {
((line_lo + 1).to_string(), format!("line {}", line_lo + 1))
} else {
(
format!("{}-{}", line_lo + 1, line_hi + 1),
format!("lines {}-{}", line_lo + 1, line_hi + 1),
)
};
let url = format!("{}{}#{}", cx.root_path(), call_data.url, anchor);
(url, title)
};
// Generate the HTML for a single example, being the title and code block
let write_example = |w: &mut Buffer, (path, call_data): (&PathBuf, &CallData)| -> bool {
let contents = match fs::read_to_string(&path) {
Ok(contents) => contents,
Err(err) => {
let span = item.span(tcx).inner();
tcx.sess
.span_err(span, &format!("failed to read file {}: {}", path.display(), err));
return false;
}
};
// To reduce file sizes, we only want to embed the source code needed to understand the example, not
// the entire file. So we find the smallest byte range that covers all items enclosing examples.
assert!(!call_data.locations.is_empty());
let min_loc =
call_data.locations.iter().min_by_key(|loc| loc.enclosing_item.byte_span.0).unwrap();
let byte_min = min_loc.enclosing_item.byte_span.0;
let line_min = min_loc.enclosing_item.line_span.0;
let max_loc =
call_data.locations.iter().max_by_key(|loc| loc.enclosing_item.byte_span.1).unwrap();
let byte_max = max_loc.enclosing_item.byte_span.1;
let line_max = max_loc.enclosing_item.line_span.1;
// The output code is limited to that byte range.
let contents_subset = &contents[(byte_min as usize)..(byte_max as usize)];
// The call locations need to be updated to reflect that the size of the program has changed.
// Specifically, the ranges are all subtracted by `byte_min` since that's the new zero point.
let (mut byte_ranges, line_ranges): (Vec<_>, Vec<_>) = call_data
.locations
.iter()
.map(|loc| {
let (byte_lo, byte_hi) = loc.call_expr.byte_span;
let (line_lo, line_hi) = loc.call_expr.line_span;
let byte_range = (byte_lo - byte_min, byte_hi - byte_min);
let line_range = (line_lo - line_min, line_hi - line_min);
let (line_url, line_title) = link_to_loc(call_data, loc);
(byte_range, (line_range, line_url, line_title))
})
.unzip();
let (_, init_url, init_title) = &line_ranges[0];
let needs_expansion = line_max - line_min > NUM_VISIBLE_LINES;
let locations_encoded = serde_json::to_string(&line_ranges).unwrap();
write!(
w,
"<div class=\"scraped-example {expanded_cls}\" data-locs=\"{locations}\">\
<div class=\"scraped-example-title\">\
{name} (<a href=\"{url}\">{title}</a>)\
</div>\
<div class=\"code-wrapper\">",
expanded_cls = if needs_expansion { "" } else { "expanded" },
name = call_data.display_name,
url = init_url,
title = init_title,
// The locations are encoded as a data attribute, so they can be read
// later by the JS for interactions.
locations = Escape(&locations_encoded)
);
if line_ranges.len() > 1 {
write!(w, r#"<span class="prev">≺</span> <span class="next">≻</span>"#);
}
if needs_expansion {
write!(w, r#"<span class="expand">↕</span>"#);
}
// Look for the example file in the source map if it exists, otherwise return a dummy span
let file_span = (|| {
let source_map = tcx.sess.source_map();
let crate_src = tcx.sess.local_crate_source_file.as_ref()?;
let abs_crate_src = crate_src.canonicalize().ok()?;
let crate_root = abs_crate_src.parent()?.parent()?;
let rel_path = path.strip_prefix(crate_root).ok()?;
let files = source_map.files();
let file = files.iter().find(|file| match &file.name {
FileName::Real(RealFileName::LocalPath(other_path)) => rel_path == other_path,
_ => false,
})?;
Some(rustc_span::Span::with_root_ctxt(
file.start_pos + BytePos(byte_min),
file.start_pos + BytePos(byte_max),
))
})()
.unwrap_or(rustc_span::DUMMY_SP);
// The root path is the inverse of Context::current
let root_path = vec!["../"; cx.current.len() - 1].join("");
let mut decoration_info = FxHashMap::default();
decoration_info.insert("highlight focus", vec![byte_ranges.remove(0)]);
decoration_info.insert("highlight", byte_ranges);
sources::print_src(
w,
contents_subset,
call_data.edition,
file_span,
cx,
&root_path,
Some(highlight::DecorationInfo(decoration_info)),
sources::SourceContext::Embedded { offset: line_min },
);
write!(w, "</div></div>");
true
};
// The call locations are output in sequence, so that sequence needs to be determined.
// Ideally the most "relevant" examples would be shown first, but there's no general algorithm
// for determining relevance. Instead, we prefer the smallest examples being likely the easiest to
// understand at a glance.
let ordered_locations = {
let sort_criterion = |(_, call_data): &(_, &CallData)| {
// Use the first location because that's what the user will see initially
let (lo, hi) = call_data.locations[0].enclosing_item.byte_span;
hi - lo
};
let mut locs = call_locations.into_iter().collect::<Vec<_>>();
locs.sort_by_key(sort_criterion);
locs
};
let mut it = ordered_locations.into_iter().peekable();
// An example may fail to write if its source can't be read for some reason, so this method
// continues iterating until a write suceeds
let write_and_skip_failure = |w: &mut Buffer, it: &mut Peekable<_>| {
while let Some(example) = it.next() {
if write_example(&mut *w, example) {
break;
}
}
};
// Write just one example that's visible by default in the method's description.
write_and_skip_failure(w, &mut it);
// Then add the remaining examples in a hidden section.
if it.peek().is_some() {
write!(
w,
"<details class=\"rustdoc-toggle more-examples-toggle\">\
<summary class=\"hideme\">\
<span>More examples</span>\
</summary>\
<div class=\"more-scraped-examples\">\
<div class=\"toggle-line\"><div class=\"toggle-line-inner\"></div></div>\
<div class=\"more-scraped-examples-inner\">"
);
// Only generate inline code for MAX_FULL_EXAMPLES number of examples. Otherwise we could
// make the page arbitrarily huge!
for _ in 0..MAX_FULL_EXAMPLES {
write_and_skip_failure(w, &mut it);
}
// For the remaining examples, generate a <ul> containing links to the source files.
if it.peek().is_some() {
write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#);
it.for_each(|(_, call_data)| {
let (url, _) = link_to_loc(&call_data, &call_data.locations[0]);
write!(
w,
r#"<li><a href="{url}">{name}</a></li>"#,
url = url,
name = call_data.display_name
);
});
write!(w, "</ul></div>");
}
write!(w, "</div></div></details>");
}
write!(w, "</div>");
}<|fim▁end|>
|
}
|
<|file_name|>market.go<|end_file_name|><|fim▁begin|>package market
import (
"github.com/Efruit/marqit/exchange"
"github.com/Efruit/marqit/managers"
"github.com/nu7hatch/gouuid"
"time"
)
type Exchange interface {
manager.Bank
manager.Broker
manager.Ticker
exchange.Dealership
exchange.Auctioner
Init() // Perform initial setup
RunID() (uuid.UUID, uint64, exchange.Mode) // Tell the Run identifier, open number, and exchange model.
AddStock(exchange.Stock) // Define a stock
IPO(exchange.Stock, float32, []exchange.LicenseID) // IPO a stock, volume specified by the exchange.Stock.Number, price by [2], initial holder(s) by [3]. If [3] is empty, the entirety will be given to a random trader.
List() []exchange.Stock // Retrieve the active stock list
Start(time.Duration, time.Duration, uint) // Run the simulation with a day length of [1] and a day count of [2]
Open() // Open the market
Pause() // Stop processing queue items
Resume() // Continue the queue<|fim▁hole|> Status() bool // Are we trading?
}<|fim▁end|>
|
Close(Normal bool) exchange.Summary // Sound the closing bell. Normal specifies the nature of the closing.
|
<|file_name|>correct.hpp<|end_file_name|><|fim▁begin|>// Boost.Geometry (aka GGL, Generic Geometry Library)
// Copyright (c) 2007-2012 Barend Gehrels, Amsterdam, the Netherlands.
// Copyright (c) 2008-2012 Bruno Lalande, Paris, France.
// Copyright (c) 2009-2012 Mateusz Loskot, London, UK.
// Copyright (c) 2014 Adam Wulkiewicz, Lodz, Poland.
// Parts of Boost.Geometry are redesigned from Geodan's Geographic Library
// (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_GEOMETRY_ALGORITHMS_CORRECT_HPP
#define BOOST_GEOMETRY_ALGORITHMS_CORRECT_HPP
#include <algorithm>
#include <cstddef>
#include <functional>
#include <boost/mpl/assert.hpp>
#include <boost/range.hpp>
#include <boost/type_traits/remove_reference.hpp>
#include <boost/variant/apply_visitor.hpp>
#include <boost/variant/static_visitor.hpp>
#include <boost/variant/variant_fwd.hpp>
#include <boost/geometry/algorithms/detail/interior_iterator.hpp>
#include <boost/geometry/core/closure.hpp>
#include <boost/geometry/core/cs.hpp>
#include <boost/geometry/core/exterior_ring.hpp>
#include <boost/geometry/core/interior_rings.hpp>
#include <boost/geometry/core/mutable_range.hpp>
#include <boost/geometry/core/ring_type.hpp>
#include <boost/geometry/core/tags.hpp>
#include <boost/geometry/geometries/concepts/check.hpp>
#include <boost/geometry/algorithms/area.hpp>
#include <boost/geometry/algorithms/disjoint.hpp>
#include <boost/geometry/algorithms/detail/multi_modify.hpp>
#include <boost/geometry/util/order_as_direction.hpp>
namespace boost { namespace geometry
{
// Silence warning C4127: conditional expression is constant
#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable : 4127)
#endif
#ifndef DOXYGEN_NO_DETAIL
namespace detail { namespace correct
{
// No-op correction for geometry types that are always valid as-is
// (points, linestrings, segments, and their multi-variants).
template <typename Geometry>
struct correct_nop
{
    static inline void apply(Geometry& )
    {}
};
// Walks the box's dimensions recursively at compile time and, for every
// dimension whose min corner exceeds its max corner, swaps the two values.
template <typename Box, std::size_t Dimension, std::size_t DimensionCount>
struct correct_box_loop
{
    typedef typename coordinate_type<Box>::type coordinate_type;
    static inline void apply(Box& box)
    {
        if (get<min_corner, Dimension>(box) > get<max_corner, Dimension>(box))
        {
            // Swap the coordinates
            coordinate_type max_value = get<min_corner, Dimension>(box);
            coordinate_type min_value = get<max_corner, Dimension>(box);
            set<min_corner, Dimension>(box, min_value);
            set<max_corner, Dimension>(box, max_value);
        }
        // Recurse into the next dimension.
        correct_box_loop
            <
                Box, Dimension + 1, DimensionCount
            >::apply(box);
    }
};
// Terminating specialization: all dimensions processed.
template <typename Box, std::size_t DimensionCount>
struct correct_box_loop<Box, DimensionCount, DimensionCount>
{
    static inline void apply(Box& )
    {}
};
// Correct a box: make min/max correct
// (entry point that kicks off the per-dimension loop over all dimensions).
template <typename Box>
struct correct_box
{
    static inline void apply(Box& box)
    {
        // Currently only for Cartesian coordinates
        // (or spherical without crossing dateline)
        // Future version: adapt using strategies
        correct_box_loop
            <
                Box, 0, dimension<Box>::type::value
            >::apply(box);
    }
};
// Close a ring, if not closed
template <typename Ring, typename Predicate>
struct correct_ring
{
typedef typename point_type<Ring>::type point_type;
typedef typename coordinate_type<Ring>::type coordinate_type;
typedef typename strategy::area::services::default_strategy
<
typename cs_tag<point_type>::type,
point_type
>::type strategy_type;
typedef detail::area::ring_area
<
<|fim▁hole|>
static inline void apply(Ring& r)
{
// Check close-ness
if (boost::size(r) > 2)
{
// check if closed, if not, close it
bool const disjoint = geometry::disjoint(*boost::begin(r), *(boost::end(r) - 1));
closure_selector const s = geometry::closure<Ring>::value;
if (disjoint && (s == closed))
{
geometry::append(r, *boost::begin(r));
}
if (! disjoint && s != closed)
{
// Open it by removing last point
geometry::traits::resize<Ring>::apply(r, boost::size(r) - 1);
}
}
// Check area
Predicate predicate;
typedef typename default_area_result<Ring>::type area_result_type;
area_result_type const zero = area_result_type();
if (predicate(ring_area_type::apply(r, strategy_type()), zero))
{
std::reverse(boost::begin(r), boost::end(r));
}
}
};
// Correct a polygon: normalizes all rings, sets outer ring clockwise, sets all
// inner rings counter clockwise (or vice versa depending on orientation)
template <typename Polygon>
struct correct_polygon
{
    typedef typename ring_type<Polygon>::type ring_type;
    typedef typename default_area_result<Polygon>::type area_result_type;
    static inline void apply(Polygon& poly)
    {
        // Exterior ring: reversed when its area compares less than zero.
        correct_ring
            <
                ring_type,
                std::less<area_result_type>
            >::apply(exterior_ring(poly));
        // Interior rings: reversed when area compares greater than zero,
        // i.e. they get the opposite orientation from the exterior ring.
        typename interior_return_type<Polygon>::type
            rings = interior_rings(poly);
        for (typename detail::interior_iterator<Polygon>::type
                it = boost::begin(rings); it != boost::end(rings); ++it)
        {
            correct_ring
                <
                    ring_type,
                    std::greater<area_result_type>
                >::apply(*it);
        }
    }
};
}} // namespace detail::correct
#endif // DOXYGEN_NO_DETAIL
#ifndef DOXYGEN_NO_DISPATCH
namespace dispatch
{
// Tag dispatch for geometry correction. The primary template inherits
// not_implemented, producing a compile-time error for unsupported tags.
template <typename Geometry, typename Tag = typename tag<Geometry>::type>
struct correct: not_implemented<Tag>
{};
// Points need no correction.
template <typename Point>
struct correct<Point, point_tag>
    : detail::correct::correct_nop<Point>
{};
// Linestrings need no correction.
template <typename LineString>
struct correct<LineString, linestring_tag>
    : detail::correct::correct_nop<LineString>
{};
// Segments need no correction.
template <typename Segment>
struct correct<Segment, segment_tag>
    : detail::correct::correct_nop<Segment>
{};
// Boxes: swap inverted min/max corner coordinates.
template <typename Box>
struct correct<Box, box_tag>
    : detail::correct::correct_box<Box>
{};
// Standalone rings: close/open per the ring's closure policy and reverse
// when the area compares less than zero.
template <typename Ring>
struct correct<Ring, ring_tag>
    : detail::correct::correct_ring
        <
            Ring,
            std::less<typename default_area_result<Ring>::type>
        >
{};
// Polygons: correct the exterior ring and all interior rings.
template <typename Polygon>
struct correct<Polygon, polygon_tag>
    : detail::correct::correct_polygon<Polygon>
{};
// Multi-points and multi-linestrings need no correction.
template <typename MultiPoint>
struct correct<MultiPoint, multi_point_tag>
    : detail::correct::correct_nop<MultiPoint>
{};
template <typename MultiLineString>
struct correct<MultiLineString, multi_linestring_tag>
    : detail::correct::correct_nop<MultiLineString>
{};
// Multi-polygons: apply the polygon correction to every element.
template <typename Geometry>
struct correct<Geometry, multi_polygon_tag>
    : detail::multi_modify
        <
            Geometry,
            detail::correct::correct_polygon
                <
                    typename boost::range_value<Geometry>::type
                >
        >
{};
} // namespace dispatch
#endif // DOXYGEN_NO_DISPATCH
namespace resolve_variant {
// Non-variant geometries: check the geometry concept, then forward to the
// tag-dispatched implementation.
template <typename Geometry>
struct correct
{
    static inline void apply(Geometry& geometry)
    {
        concepts::check<Geometry const>();
        dispatch::correct<Geometry>::apply(geometry);
    }
};
// boost::variant geometries: visit the currently held alternative and
// correct it in place.
template <BOOST_VARIANT_ENUM_PARAMS(typename T)>
struct correct<boost::variant<BOOST_VARIANT_ENUM_PARAMS(T)> >
{
    struct visitor: boost::static_visitor<void>
    {
        template <typename Geometry>
        void operator()(Geometry& geometry) const
        {
            correct<Geometry>::apply(geometry);
        }
    };
    static inline void
    apply(boost::variant<BOOST_VARIANT_ENUM_PARAMS(T)>& geometry)
    {
        boost::apply_visitor(visitor(), geometry);
    }
};
} // namespace resolve_variant
/*!
\brief Corrects a geometry
\details Corrects a geometry: all rings which are wrongly oriented with respect
to their expected orientation are reversed. To all rings which do not have a
closing point and are typed as they should have one, the first point is
appended. Also boxes can be corrected.
\ingroup correct
\tparam Geometry \tparam_geometry
\param geometry \param_geometry which will be corrected if necessary
\qbk{[include reference/algorithms/correct.qbk]}
*/
template <typename Geometry>
inline void correct(Geometry& geometry)
{
resolve_variant::correct<Geometry>::apply(geometry);
}
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_ALGORITHMS_CORRECT_HPP<|fim▁end|>
|
order_as_direction<geometry::point_order<Ring>::value>::value,
geometry::closure<Ring>::value
> ring_area_type;
|
<|file_name|>nested_structure_coder_test.py<|end_file_name|><|fim▁begin|><|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nested structure coding."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from google.protobuf import text_format
from tensorflow.core.protobuf import struct_pb2
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops.numpy_ops import np_arrays
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow.python.saved_model import nested_structure_coder
class NestedStructureTest(test.TestCase):
def setUp(self):
super(NestedStructureTest, self).setUp()
self._coder = nested_structure_coder.StructureCoder()
def testEncodeDecodeList(self):
structure = [1.5, 2.5, 3.0]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.list_value.values.add().float64_value = 1.5
expected.list_value.values.add().float64_value = 2.5
expected.list_value.values.add().float64_value = 3.0
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeTuple(self):
structure = ("hello", [3, (2, 1)])
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.tuple_value.values.add().string_value = "hello"
list_value = expected.tuple_value.values.add().list_value
list_value.values.add().int64_value = 3
tuple_value = list_value.values.add().tuple_value
tuple_value.values.add().int64_value = 2
tuple_value.values.add().int64_value = 1
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeDict(self):
structure = dict(a=3, b=[7, 2.5])
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.dict_value.fields["a"].int64_value = 3
list_value = expected.dict_value.fields["b"].list_value
list_value.values.add().int64_value = 7
list_value.values.add().float64_value = 2.5
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertIsInstance(decoded["a"], int)
self.assertEqual(structure, decoded)
def testEncodeDecodeTensorShape(self):
structure = [tensor_shape.TensorShape([1, 2, 3]), "hello"]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected_list = expected.list_value
expected_tensor_shape = expected_list.values.add().tensor_shape_value
expected_tensor_shape.dim.add().size = 1
expected_tensor_shape.dim.add().size = 2
expected_tensor_shape.dim.add().size = 3
expected_tensor_shape = expected_list.values.add().string_value = "hello"
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeNamedTuple(self):
named_tuple_type = collections.namedtuple("NamedTuple", ["x", "y"])
named_tuple = named_tuple_type(x=[1, 2], y="hello")
self.assertTrue(self._coder.can_encode(named_tuple))
encoded = self._coder.encode_structure(named_tuple)
expected = struct_pb2.StructuredValue()
expected_named_tuple = expected.named_tuple_value
expected_named_tuple.name = "NamedTuple"
key_value_pair = expected_named_tuple.values.add()
key_value_pair.key = "x"
list_value = key_value_pair.value.list_value
list_value.values.add().int64_value = 1
list_value.values.add().int64_value = 2
key_value_pair = expected_named_tuple.values.add()
key_value_pair.key = "y"
key_value_pair.value.string_value = "hello"
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(named_tuple._asdict(), decoded._asdict())
self.assertEqual(named_tuple.__class__.__name__, decoded.__class__.__name__)
def testNone(self):
structure = [1.0, None]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.list_value.values.add().float64_value = 1.0
expected.list_value.values.add().none_value.CopyFrom(struct_pb2.NoneValue())
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testBool(self):
structure = [False]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.list_value.values.add().bool_value = False
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEmptyStructures(self):
structure = [list(), dict(), tuple()]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected.list_value.values.add().list_value.CopyFrom(struct_pb2.ListValue())
expected.list_value.values.add().dict_value.CopyFrom(struct_pb2.DictValue())
expected.list_value.values.add().tuple_value.CopyFrom(
struct_pb2.TupleValue())
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testDtype(self):
structure = [dtypes.int64]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
list_value = expected.list_value.values.add()
list_value.tensor_dtype_value = dtypes.int64.as_datatype_enum
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeTensorSpec(self):
structure = [tensor_spec.TensorSpec([1, 2, 3], dtypes.int64, "hello")]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected_list = expected.list_value
expected_tensor_spec = expected_list.values.add().tensor_spec_value
expected_tensor_spec.shape.dim.add().size = 1
expected_tensor_spec.shape.dim.add().size = 2
expected_tensor_spec.shape.dim.add().size = 3
expected_tensor_spec.name = "hello"
expected_tensor_spec.dtype = dtypes.int64.as_datatype_enum
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeTensorSpecWithNoName(self):
structure = [tensor_spec.TensorSpec([1, 2, 3], dtypes.int64)]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected_list = expected.list_value
expected_tensor_spec = expected_list.values.add().tensor_spec_value
expected_tensor_spec.shape.dim.add().size = 1
expected_tensor_spec.shape.dim.add().size = 2
expected_tensor_spec.shape.dim.add().size = 3
expected_tensor_spec.name = ""
expected_tensor_spec.dtype = dtypes.int64.as_datatype_enum
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeRaggedTensorSpec(self):
structure = [ragged_tensor.RaggedTensorSpec(
[1, 2, 3], dtypes.int64, 2, dtypes.int32)]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected_pbtxt = r"""
list_value {
values {
type_spec_value {
type_spec_class: RAGGED_TENSOR_SPEC
type_spec_class_name: 'RaggedTensorSpec'
type_state {
tuple_value {
# spec._shape
values {
tensor_shape_value {
dim { size: 1 }
dim { size: 2 }
dim { size: 3 }
}
}
# spec._dtype
values { tensor_dtype_value: DT_INT64 }
# spec._ragged_rank
values { int64_value: 2 }
# spec._row_splits_dtype
values { tensor_dtype_value: DT_INT32 }
}
}
}
}
}
"""
expected = struct_pb2.StructuredValue()
text_format.Parse(expected_pbtxt, expected)
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeSparseTensorSpec(self):
structure = [sparse_tensor.SparseTensorSpec([10, 20], dtypes.float32)]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected_pbtxt = r"""
list_value {
values {
type_spec_value {
type_spec_class: SPARSE_TENSOR_SPEC
type_spec_class_name: 'SparseTensorSpec'
type_state {
tuple_value {
# spec._shape
values {
tensor_shape_value {
dim { size: 10 }
dim { size: 20 }
}
}
# spec._dtype
values { tensor_dtype_value: DT_FLOAT }
}
}
}
}
}
"""
expected = struct_pb2.StructuredValue()
text_format.Parse(expected_pbtxt, expected)
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testDecodeUnknownTensorSpec(self):
encoded = struct_pb2.StructuredValue()
encoded.type_spec_value.type_spec_class = 0
encoded.type_spec_value.type_spec_class_name = "FutureTensorSpec"
with self.assertRaisesRegex(ValueError,
"The type 'FutureTensorSpec' is not supported"):
self._coder.decode_proto(encoded)
def testEncodeDecodeBoundedTensorSpec(self):
structure = [
tensor_spec.BoundedTensorSpec([1, 2, 3], dtypes.int64, 0, 10,
"hello-0-10")
]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected_list = expected.list_value
expected_tensor_spec = expected_list.values.add().bounded_tensor_spec_value
expected_tensor_spec.shape.dim.add().size = 1
expected_tensor_spec.shape.dim.add().size = 2
expected_tensor_spec.shape.dim.add().size = 3
expected_tensor_spec.name = "hello-0-10"
expected_tensor_spec.dtype = dtypes.int64.as_datatype_enum
expected_tensor_spec.minimum.CopyFrom(
tensor_util.make_tensor_proto([0], dtype=dtypes.int64, shape=[]))
expected_tensor_spec.maximum.CopyFrom(
tensor_util.make_tensor_proto([10], dtype=dtypes.int64, shape=[]))
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeBoundedTensorSpecNoName(self):
structure = [
tensor_spec.BoundedTensorSpec((28, 28, 3), dtypes.float64, -2,
(1, 1, 20))
]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
expected = struct_pb2.StructuredValue()
expected_list = expected.list_value
expected_tensor_spec = expected_list.values.add().bounded_tensor_spec_value
expected_tensor_spec.shape.dim.add().size = 28
expected_tensor_spec.shape.dim.add().size = 28
expected_tensor_spec.shape.dim.add().size = 3
expected_tensor_spec.name = ""
expected_tensor_spec.dtype = dtypes.float64.as_datatype_enum
expected_tensor_spec.minimum.CopyFrom(
tensor_util.make_tensor_proto([-2], dtype=dtypes.float64, shape=[]))
expected_tensor_spec.maximum.CopyFrom(
tensor_util.make_tensor_proto([1, 1, 20],
dtype=dtypes.float64,
shape=[3]))
self.assertEqual(expected, encoded)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDataSetSpec(self):
structure = [dataset_ops.DatasetSpec(
{"rt": ragged_tensor.RaggedTensorSpec([10, None], dtypes.int32),
"st": sparse_tensor.SparseTensorSpec([10, 20], dtypes.float32),
"t": tensor_spec.TensorSpec([10, 8], dtypes.string)})]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testEncodeDecodeNdarraySpec(self):
structure = [np_arrays.NdarraySpec(
tensor_spec.TensorSpec([4, 2], dtypes.float32))]
self.assertTrue(self._coder.can_encode(structure))
encoded = self._coder.encode_structure(structure)
decoded = self._coder.decode_proto(encoded)
self.assertEqual(structure, decoded)
def testNotEncodable(self):
class NotEncodable(object):
pass
self.assertFalse(self._coder.can_encode([NotEncodable()]))
if __name__ == "__main__":
test.main()<|fim▁end|>
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
|
<|file_name|>PaginationInterceptorTest.java<|end_file_name|><|fim▁begin|>package com.baomidou.mybatisplus.test.plugins.paginationInterceptor;
import java.io.Reader;
import java.sql.Connection;
import java.util.List;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.RowBounds;
import org.apache.ibatis.session.SqlSession;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.baomidou.mybatisplus.plugins.Page;
import com.baomidou.mybatisplus.test.plugins.RandomUtils;
import com.baomidou.mybatisplus.test.plugins.paginationInterceptor.entity.PageUser;
import com.baomidou.mybatisplus.test.plugins.paginationInterceptor.mapper.PageUserMapper;
import com.baomidou.mybatisplus.test.plugins.paginationInterceptor.service.PageUserService;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "/plugins/paginationInterceptor.xml" })
public class PaginationInterceptorTest {
@Autowired
private SqlSessionTemplate sqlSessionTemplate;
@Autowired
private PageUserService pageUserService;
@Autowired
private PageUserMapper pageUserMapper;
private int current;
private int size;
@Before
public void setUp() throws Exception {
SqlSession session = sqlSessionTemplate.getSqlSessionFactory().openSession();
Connection conn = session.getConnection();
Reader reader = Resources.getResourceAsReader("com/baomidou/mybatisplus/test/plugins/paginationInterceptor/CreateDB.sql");
ScriptRunner runner = new ScriptRunner(conn);
runner.setLogWriter(null);
runner.runScript(reader);
reader.close();
session.close();
// 随机当前页和分页大小
size = RandomUtils.nextInt(1, 50);
current = RandomUtils.nextInt(1, 200 / size);
System.err.println("当前页为:" + current + " 分页大小为" + size);
}
@Test
public void pageSimpleTest() {
// 最基础分页
Page<PageUser> page1 = new Page<>(current, size);
Page<PageUser> result1 = pageUserService.selectPage(page1);
Assert.assertTrue(!result1.getRecords().isEmpty());
}
@Test
public void pageOrderByTest() {
// 带OrderBy
Page<PageUser> page2 = new Page<>(current, size, "name");
Page<PageUser> result2 = pageUserService.selectPage(page2);
Assert.assertTrue(!result2.getRecords().isEmpty());
// 没有orderby但是设置了倒叙
Page<PageUser> page3 = new Page<>(current, size);
page3.setAsc(false);
Page<PageUser> result3 = pageUserService.selectPage(page3);
Assert.assertTrue(!result3.getRecords().isEmpty());
// 有orderby设置了倒叙
Page<PageUser> page4 = new Page<>(current, size, "name");
page3.setAsc(false);
Page<PageUser> result4 = pageUserService.selectPage(page4);
Assert.assertTrue(!result4.getRecords().isEmpty());
}
@Test
public void pageCountTest() {<|fim▁hole|> Page<PageUser> page = new Page<>(current, size);
page.setSearchCount(false);
Page<PageUser> result = pageUserService.selectPage(page);
Assert.assertTrue(result.getTotal() == 0);
}
@Test
public void rowBoundTest() {
System.err.println("测试原生RowBounds分页");
int offset = RandomUtils.nextInt(1, 190);
int limit = RandomUtils.nextInt(1,20);
RowBounds rowBounds = new RowBounds(offset, limit);
List<PageUser> result = pageUserMapper.selectPage(rowBounds, null);
Assert.assertTrue(!result.isEmpty());
}
}<|fim▁end|>
|
// 设置不count
|
<|file_name|>test_context.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.<|fim▁hole|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import openstack.common.context
from openstack.common.middleware import context
from openstack.common import test
class ContextMiddlewareTest(test.BaseTestCase):
def test_process_request(self):
req = mock.Mock()
app = mock.Mock()
options = mock.MagicMock()
ctx = mock.sentinel.context
with mock.patch.object(context.ContextMiddleware,
'make_context',
mock.Mock(return_value=ctx)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.process_request(req)
self.assertEqual(req.context, ctx)
def test_make_context(self):
app = mock.Mock()
options = mock.MagicMock()
with mock.patch.object(openstack.common.context.RequestContext,
'__init__',
mock.Mock(return_value=None)) as init:
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
init.assert_called_with(mock.sentinel.arg)
def test_make_explicit_context(self):
app = mock.Mock()
import_class = mock.Mock()
options = {'context_class': mock.sentinel.context_class}
with mock.patch('openstack.common.importutils.import_class',
mock.Mock(return_value=import_class)):
ctx_middleware = context.ContextMiddleware(app, options)
ctx_middleware.make_context(mock.sentinel.arg)
import_class.assert_called_with(mock.sentinel.arg)
class FilterFactoryTest(test.BaseTestCase):
def test_filter_factory(self):
global_conf = dict(sentinel=mock.sentinel.global_conf)
app = mock.sentinel.app
target = 'openstack.common.middleware.context.ContextMiddleware'
def check_ctx_middleware(arg_app, arg_conf):
self.assertEqual(app, arg_app)
self.assertEqual(global_conf['sentinel'], arg_conf['sentinel'])
return mock.DEFAULT
with mock.patch(target,
mock.Mock(return_value=mock.sentinel.ctx)) as mid:
mid.side_effect = check_ctx_middleware
filter = context.filter_factory(global_conf)
self.assertEqual(filter(app), mock.sentinel.ctx)<|fim▁end|>
|
#
|
<|file_name|>no_0647_palindromic_substrings.rs<|end_file_name|><|fim▁begin|>struct Solution;
impl Solution {
pub fn count_substrings(s: String) -> i32 {
let s = s.as_bytes();
let n = s.len() as i32;
let mut ans = 0;
for i in 0..(2 * n - 1) {
// i 是中心点,包括了空隙,所以还要求出对应的索引。
// [0, 0], [0, 1], [1, 1], [1, 2] ...
let mut l = i / 2;
let mut r = l + i % 2;
// 从中心点向两边扩散。
while l >= 0 && r < n && s[l as usize] == s[r as usize] {
ans += 1;
l -= 1;
r += 1;
}
}
ans
}
}
#[cfg(test)]
mod tests {<|fim▁hole|>
#[test]
fn test_count_substrings1() {
assert_eq!(Solution::count_substrings("abc".to_string()), 3);
}
#[test]
fn test_count_substrings2() {
assert_eq!(Solution::count_substrings("aaa".to_string()), 6);
}
}<|fim▁end|>
|
use super::*;
|
<|file_name|>float.js<|end_file_name|><|fim▁begin|>import Float from 'ember-advanced-form/components/float';
<|fim▁hole|><|fim▁end|>
|
export default Float;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.